Example #1
0
class BridgeDamage(BaseAnalysis):
    """Computes bridge structural damage for earthquake, tsunami, tornado, and hurricane hazards.

    Args:
        incore_client (IncoreClient): Service authentication.

    """
    def __init__(self, incore_client):
        self.hazardsvc = HazardService(incore_client)
        self.fragilitysvc = FragilityService(incore_client)

        super(BridgeDamage, self).__init__(incore_client)

    def run(self):
        """Executes bridge damage analysis.

        Returns:
            bool: True once results have been computed and stored.

        """
        # Bridge dataset
        bridge_set = self.get_input_dataset("bridges").get_inventory_reader()

        # Get hazard input
        hazard_type = self.get_parameter("hazard_type")
        hazard_dataset_id = self.get_parameter("hazard_id")

        # Default to a single CPU unless the user requested a positive count
        user_defined_cpu = 1
        num_cpu = self.get_parameter("num_cpu")
        if num_cpu is not None and num_cpu > 0:
            user_defined_cpu = num_cpu

        num_workers = AnalysisUtil.determine_parallelism_locally(
            self, len(bridge_set), user_defined_cpu)

        # Split the inventory into roughly equal chunks, one per worker.
        # Clamp the chunk size to at least 1: with fewer bridges than
        # workers, int(len / num_workers) is 0, which previously made the
        # chunking loop advance by zero and hang forever.
        avg_bulk_input_size = max(int(len(bridge_set) / num_workers), 1)
        inventory_list = list(bridge_set)
        inventory_args = [
            inventory_list[pos:pos + avg_bulk_input_size]
            for pos in range(0, len(inventory_list), avg_bulk_input_size)
        ]

        (ds_results, damage_results) = self.bridge_damage_concurrent_future(
            self.bridge_damage_analysis_bulk_input, num_workers,
            inventory_args, repeat(hazard_type), repeat(hazard_dataset_id))

        self.set_result_csv_data("result",
                                 ds_results,
                                 name=self.get_parameter("result_name"))
        self.set_result_json_data("metadata",
                                  damage_results,
                                  name=self.get_parameter("result_name") +
                                  "_additional_info")

        return True

    def bridge_damage_concurrent_future(self, function_name, num_workers,
                                        *args):
        """Utilizes concurrent.future module.

        Args:
            function_name (function): The function to be parallelized.
            num_workers (int): Maximum number workers in parallelization.
            *args: All the arguments in order to pass into parameter function_name.

        Returns:
            tuple: Two lists of ordered dictionaries, damage state results and
            damage metadata, concatenated across all workers in order.

        """
        output_ds = []
        output_dmg = []
        with concurrent.futures.ProcessPoolExecutor(
                max_workers=num_workers) as executor:
            # executor.map preserves input ordering, so results line up with
            # the chunk order produced by run()
            for ret1, ret2 in executor.map(function_name, *args):
                output_ds.extend(ret1)
                output_dmg.extend(ret2)

        return output_ds, output_dmg

    def bridge_damage_analysis_bulk_input(self, bridges, hazard_type,
                                          hazard_dataset_id):
        """Run analysis for multiple bridges.

        Args:
            bridges (list): Multiple bridges from input inventory set.
            hazard_type (str): Hazard type: earthquake, tornado, tsunami, hurricane or flood.
            hazard_dataset_id (str): An id of the hazard exposure.

        Returns:
            tuple: Two lists of ordered dictionaries - damage state results and
            damage metadata - one entry per bridge, mapped bridges first.

        Raises:
            ValueError: If the hazard type is unsupported or a fragility is in
                a deprecated (non-DFR3Curve) format.

        """
        # Get fragility key; the default depends on the hazard type
        fragility_key = self.get_parameter("fragility_key")
        if fragility_key is None:
            fragility_key = BridgeUtil.DEFAULT_TSUNAMI_HMAX_FRAGILITY_KEY if hazard_type == 'tsunami' else \
                BridgeUtil.DEFAULT_FRAGILITY_KEY
            self.set_parameter("fragility_key", fragility_key)

        # Hazard uncertainty (earthquake only)
        # NOTE(review): read but not applied anywhere in this method -
        # confirm whether downstream code consumes the stored parameter.
        use_hazard_uncertainty = False
        if hazard_type == "earthquake" and self.get_parameter(
                "use_hazard_uncertainty") is not None:
            use_hazard_uncertainty = self.get_parameter(
                "use_hazard_uncertainty")

        # Liquefaction (earthquake only); same caveat as above
        use_liquefaction = False
        if hazard_type == "earthquake" and self.get_parameter(
                "use_liquefaction") is not None:
            use_liquefaction = self.get_parameter("use_liquefaction")

        # Map each bridge in this chunk to its fragility curve set
        fragility_set = self.fragilitysvc.match_inventory(
            self.get_input_dataset("dfr3_mapping_set"), bridges, fragility_key)

        # Build one hazard-value request entry per mapped bridge, keeping
        # mapped/unmapped bridges separated for the output loops below
        values_payload = []
        unmapped_bridges = []
        mapped_bridges = []
        for b in bridges:
            bridge_id = b["id"]
            if bridge_id in fragility_set:
                location = GeoUtil.get_location(b)
                # hazard service expects "lat,lon"
                loc = str(location.y) + "," + str(location.x)

                demands = fragility_set[bridge_id].demand_types
                units = fragility_set[bridge_id].demand_units
                value = {"demands": demands, "units": units, "loc": loc}
                values_payload.append(value)
                mapped_bridges.append(b)
            else:
                unmapped_bridges.append(b)

        # not needed anymore as they are already split into mapped and unmapped
        del bridges

        if hazard_type == 'earthquake':
            hazard_vals = self.hazardsvc.post_earthquake_hazard_values(
                hazard_dataset_id, values_payload)
        elif hazard_type == 'tornado':
            hazard_vals = self.hazardsvc.post_tornado_hazard_values(
                hazard_dataset_id, values_payload)
        elif hazard_type == 'tsunami':
            hazard_vals = self.hazardsvc.post_tsunami_hazard_values(
                hazard_dataset_id, values_payload)
        elif hazard_type == 'hurricane':
            hazard_vals = self.hazardsvc.post_hurricane_hazard_values(
                hazard_dataset_id, values_payload)
        elif hazard_type == 'flood':
            hazard_vals = self.hazardsvc.post_flood_hazard_values(
                hazard_dataset_id, values_payload)
        else:
            raise ValueError(
                "The provided hazard type is not supported yet by this analysis"
            )

        ds_results = []
        damage_results = []

        # hazard_vals[i] corresponds to mapped_bridges[i] (the payload was
        # built in the same order)
        for i, bridge in enumerate(mapped_bridges):
            ds_result = dict()
            damage_result = dict()
            dmg_probability = dict()
            dmg_intervals = dict()
            selected_fragility_set = fragility_set[bridge["id"]]

            if isinstance(selected_fragility_set.fragility_curves[0],
                          DFR3Curve):
                # Supports multiple demand types in same fragility
                hazard_val = AnalysisUtil.update_precision_of_lists(
                    hazard_vals[i]["hazardValues"])
                input_demand_types = hazard_vals[i]["demands"]
                input_demand_units = hazard_vals[i]["units"]

                # demand types and hazard values are positionally aligned
                hval_dict = dict(
                    zip(selected_fragility_set.demand_types, hazard_val))

                if not AnalysisUtil.do_hazard_values_have_errors(
                        hazard_vals[i]["hazardValues"]):
                    bridge_args = selected_fragility_set.construct_expression_args_from_inventory(
                        bridge)
                    dmg_probability = \
                        selected_fragility_set.calculate_limit_state(hval_dict,
                                                                     inventory_type="bridge",
                                                                     **bridge_args)
                    dmg_intervals = selected_fragility_set.calculate_damage_interval(
                        dmg_probability,
                        hazard_type=hazard_type,
                        inventory_type="bridge")
            else:
                raise ValueError(
                    "One of the fragilities is in deprecated format. This should not happen. If you are "
                    "seeing this please report the issue.")

            retrofit_cost = BridgeUtil.get_retrofit_cost(fragility_key)
            retrofit_type = BridgeUtil.get_retrofit_type(fragility_key)

            ds_result['guid'] = bridge['properties']['guid']
            ds_result.update(dmg_probability)
            ds_result.update(dmg_intervals)
            ds_result[
                'haz_expose'] = AnalysisUtil.get_exposure_from_hazard_values(
                    hazard_val, hazard_type)

            damage_result['guid'] = bridge['properties']['guid']
            damage_result['fragility_id'] = selected_fragility_set.id
            damage_result["retrofit"] = retrofit_type
            damage_result["retrocost"] = retrofit_cost
            damage_result["demandtypes"] = input_demand_types
            damage_result["demandunits"] = input_demand_units
            damage_result["hazardtype"] = hazard_type
            damage_result["hazardval"] = hazard_val

            # add spans to bridge output so mean damage calculation can use that info
            # NOTE(review): the uppercase branch writes key 'SPANS' while the
            # other branches write 'spans' - confirm downstream consumers
            # handle both before unifying.
            if "spans" in bridge["properties"] and bridge["properties"][
                    "spans"] is not None:
                if isinstance(bridge["properties"]["spans"],
                              str) and bridge["properties"]["spans"].isdigit():
                    damage_result['spans'] = int(bridge["properties"]["spans"])
                elif isinstance(bridge["properties"]["spans"], int):
                    damage_result['spans'] = bridge["properties"]["spans"]
            elif "SPANS" in bridge["properties"] and bridge["properties"][
                    "SPANS"] is not None:
                if isinstance(bridge["properties"]["SPANS"],
                              str) and bridge["properties"]["SPANS"].isdigit():
                    damage_result['SPANS'] = int(bridge["properties"]["SPANS"])
                elif isinstance(bridge["properties"]["SPANS"], int):
                    damage_result['SPANS'] = bridge["properties"]["SPANS"]
            else:
                damage_result['spans'] = 1

            ds_results.append(ds_result)
            damage_results.append(damage_result)

        # Unmapped bridges still appear in the output, with null damage fields
        for bridge in unmapped_bridges:
            ds_result = dict()
            damage_result = dict()

            ds_result['guid'] = bridge['properties']['guid']

            damage_result['guid'] = bridge['properties']['guid']
            damage_result["retrofit"] = None
            damage_result["retrocost"] = None
            damage_result["demandtypes"] = None
            damage_result['demandunits'] = None
            damage_result["hazardtype"] = None
            damage_result['hazardval'] = None
            damage_result['spans'] = None

            ds_results.append(ds_result)
            damage_results.append(damage_result)

        return ds_results, damage_results

    def get_spec(self):
        """Get specifications of the bridge damage analysis.

        Returns:
            obj: A JSON object of specifications of the bridge damage analysis.

        """
        return {
            'name': 'bridge-damage',
            'description': 'bridge damage analysis',
            'input_parameters': [
                {
                    'id': 'result_name',
                    'required': True,
                    'description': 'result dataset name',
                    'type': str
                },
                {
                    'id': 'hazard_type',
                    'required': True,
                    'description': 'Hazard Type (e.g. earthquake)',
                    'type': str
                },
                {
                    'id': 'hazard_id',
                    'required': True,
                    'description': 'Hazard ID',
                    'type': str
                },
                {
                    'id': 'fragility_key',
                    'required': False,
                    'description': 'Fragility key to use in mapping dataset',
                    'type': str
                },
                {
                    'id': 'use_liquefaction',
                    'required': False,
                    'description': 'Use liquefaction',
                    'type': bool
                },
                {
                    'id': 'use_hazard_uncertainty',
                    'required': False,
                    'description': 'Use hazard uncertainty',
                    'type': bool
                },
                {
                    'id': 'num_cpu',
                    'required': False,
                    'description':
                    'If using parallel execution, the number of cpus to request',
                    'type': int
                },
            ],
            'input_datasets': [{
                'id': 'bridges',
                'required': True,
                'description': 'Bridge Inventory',
                'type':
                ['ergo:bridges', 'ergo:bridgesVer2', 'ergo:bridgesVer3'],
            }, {
                'id': 'dfr3_mapping_set',
                'required': True,
                'description': 'DFR3 Mapping Set Object',
                'type': ['incore:dfr3MappingSet'],
            }],
            'output_datasets': [{
                'id': 'result',
                'parent_type': 'bridges',
                'description': 'CSV file of bridge structural damage',
                'type': 'ergo:bridgeDamageVer3'
            }, {
                'id': 'metadata',
                'parent_type': 'bridges',
                'description':
                'additional metadata in json file about applied hazard value and '
                'fragility',
                'type': 'incore:bridgeDamageSupplement'
            }]
        }
Example #2
0
class TornadoEpnDamage(BaseAnalysis):
    """
    Computes electric power network (EPN) probability of damage based on a tornado hazard.
    The process for computing the structural damage is similar to other parts of the built environment.
    First, fragilities are obtained based on the hazard type and attributes of the network tower and network pole.
    Based on the fragility, the hazard intensity at the location of the infrastructure is computed. Using this
    information, the probability of exceeding each limit state is computed, along with the probability of damage.
    """
    def __init__(self, incore_client):
        """Set up service clients, fragility ids, and repair-model constants.

        Args:
            incore_client (IncoreClient): Service authentication.

        """
        self.hazardsvc = HazardService(incore_client)
        self.fragilitysvc = FragilityService(incore_client)
        self.datasetsvc = DataService(incore_client)
        self.fragility_tower_id = '5b201b41b1cf3e336de8fa67'
        self.fragility_pole_id = '5b201d91b1cf3e336de8fa68'

        # this is for deciding to use indpnode field. Not using this could be safer for general dataset
        self.use_indpnode = False
        self.nnode = 0
        self.highest_node_num = 0
        self.EF = 0
        self.nint = []
        self.indpnode = []

        def lognormal_cost_params(mean_cost):
            # Derive (variance, gaussian sigma, gaussian mu) for a lognormal
            # repair-cost model from a mean cost with 10% coefficient of
            # variation; sigma/mu feed logncdf later.
            variance = (0.1 * mean_cost)**2
            sigma = math.sqrt(math.log(variance / (mean_cost**2) + 1))
            mu = math.log((mean_cost**2) / math.sqrt(variance + mean_cost**2))
            return variance, sigma, mu

        # mean repair cost for single distribution pole
        self.mcost = 1435
        self.vcost, self.sigmad, self.mud = lognormal_cost_params(self.mcost)

        # mean repair cost for single transmission pole
        # (note: mcost/vcost keep these transmission values afterwards)
        self.mcost = 400000
        self.vcost, self.sigmat, self.mut = lognormal_cost_params(self.mcost)

        self.tmut = 72  # mean repairtime for transmission tower in hrs
        self.tsigmat = 36  # std dev

        self.tmud = 5  # mean repairtime for poles in hrs
        self.tsigmad = 2.5

        self.totalcost2repairpath = []
        self.totalpoles2repair = []

        # attribute names in the tornado dataset
        self.tornado_sim_field_name = 'SIMULATION'
        self.tornado_ef_field_name = 'EF_RATING'

        # tornado number of simulation and ef_rate
        self.nmcs = 0
        self.tornado_ef_rate = 0

        # spacing between distribution poles (used to count poles per
        # intersected line length in meters)
        self.pole_distance = 38.1

        # node variables
        self.nodenwid_fld_name = "NODENWID"
        self.indpnode_fld_name = "INDPNODE"
        self.guid_fldname = 'GUID'

        # link variables
        self.tonode_fld_name = "TONODE"
        self.fromnode_fld_name = "FROMNODE"
        self.linetype_fld_name = "LINETYPE"

        # line type variable
        self.line_transmission = "transmission"
        self.line_distribution = "distribution"

        super(TornadoEpnDamage, self).__init__(incore_client)

    def run(self):
        """Executes the tornado EPN damage analysis.

        Returns:
            bool: True once results have been stored.

        """
        nodes = self.get_input_dataset("epn_node").get_inventory_reader()
        links = self.get_input_dataset("epn_link").get_inventory_reader()

        tornado_id = self.get_parameter('tornado_id')
        metadata = self.hazardsvc.get_tornado_hazard_metadata(tornado_id)

        # fetch the tornado path dataset referenced by the hazard metadata
        self.load_remote_input_dataset("tornado", metadata["datasetId"])
        tornado = self.get_input_dataset("tornado").get_inventory_reader()

        ds_results, damage_results = self.get_damage(nodes, links, tornado,
                                                     tornado_id)

        result_name = self.get_parameter("result_name")
        self.set_result_csv_data("result", ds_results, name=result_name)
        self.set_result_json_data("metadata",
                                  damage_results,
                                  name=result_name + "_additional_info")

        return True

    def get_damage(self, node_dataset, link_dataset, tornado_dataset,
                   tornado_id):
        """

        Args:
            node_dataset (obj): Node dataset.
            link_dataset (obj): Link dataset.
            tornado_dataset (obj): Tornado dataset.
            tornado_id (str): Tornado id.

        """
        self.set_tornado_variables(tornado_dataset)
        self.set_node_variables(node_dataset)

        # get fragility curves set - tower for transmission, pole for distribution
        fragility_set_tower = FragilityCurveSet(
            self.fragilitysvc.get_dfr3_set(self.fragility_tower_id))
        assert fragility_set_tower.id == self.fragility_tower_id
        fragility_set_pole = FragilityCurveSet(
            self.fragilitysvc.get_dfr3_set(self.fragility_pole_id))
        assert fragility_set_pole.id == self.fragility_pole_id

        # network test
        node_id_validation = NetworkUtil.validate_network_node_ids(
            node_dataset, link_dataset, self.fromnode_fld_name,
            self.tonode_fld_name, self.nodenwid_fld_name)
        if node_id_validation is False:
            print(
                "ID in from or to node field doesn't exist in the node dataset"
            )
            os.exit(0)

        # getting network graph and node coordinates
        is_directed_graph = True

        graph, node_coords = NetworkUtil.create_network_graph_from_field(
            link_dataset, self.fromnode_fld_name, self.tonode_fld_name,
            is_directed_graph)

        # reverse the graph to acculate the damage to next to node
        graph = nx.DiGraph.reverse(graph, copy=True)

        # check the connection as a list
        connection_sets = []
        if is_directed_graph:
            connection_sets = list(nx.weakly_connected_components(graph))
        else:
            connection_sets = list(nx.connected_components(graph))

        # check the first node of the each network line, this first node should lead each separated network
        # also convert connection set to list
        first_node_list = []
        connection_list = []
        for c in connection_sets:
            connection_list.append(list(c))
            first_node_list.append(list(c)[0])

        intersection_list = []
        poly_list = []
        totalcost2repair = []
        totalpoles2repair = []
        totaltime2repair = []

        # construct guid field
        guid_list = []
        nodenwid_list = []
        for node_feature in node_dataset:
            # get guid colum
            guid_fld_val = ''
            if self.guid_fldname.lower() in node_feature['properties']:
                guid_fld_val = node_feature['properties'][
                    self.guid_fldname.lower()]
            elif self.guid_fldname in node_feature['properties']:
                guid_fld_val = node_feature['properties'][self.guid_fldname]
            guid_list.append(guid_fld_val)

            # get nodenwid colum
            nodenwid_fld_val = ''
            if self.nodenwid_fld_name.lower() in node_feature['properties']:
                nodenwid_fld_val = int(
                    node_feature['properties'][self.nodenwid_fld_name.lower()])
            elif self.nodenwid_fld_name in node_feature['properties']:
                nodenwid_fld_val = int(
                    node_feature['properties'][self.nodenwid_fld_name])
            nodenwid_list.append(nodenwid_fld_val)

        for z in range(self.nmcs):
            nodedam = [
                0
            ] * self.nnode  # placeholder for recording number of damaged pole for each node
            noderepair = [
                0
            ] * self.nnode  # placeholder for recording repair cost for each node
            poles2repair = [
                0
            ] * self.nnode  # placeholder for recording total number of poles to repair
            cost2repairpath = [
                0
            ] * self.nnode  # placeholder for recording total repair cost for the network
            time2repairpath = [
                0
            ] * self.nnode  # placeholder for recording total repair time for the network
            nodetimerep = [0] * self.nnode
            hazardval = [[
                0
            ]] * self.nnode  # placeholder for recording hazard values
            demandtypes = [[
                ""
            ]] * self.nnode  # placeholder for recording demand types
            demandunits = [[
                ""
            ]] * self.nnode  # placeholder for recording demand units

            # iterate link
            for line_feature in link_dataset:
                ndamage = 0  # number of damaged poles in each link
                repaircost = 0  # repair cost value
                repairtime = 0  # repair time value
                to_node_val = ""
                linetype_val = ""
                tor_hazard_values = [0]  # random wind speed in EF
                demand_types = [""]
                demand_units = [""]

                if self.tonode_fld_name.lower() in line_feature['properties']:
                    to_node_val = line_feature['properties'][
                        self.tonode_fld_name.lower()]
                elif self.tonode_fld_name in line_feature['properties']:
                    to_node_val = line_feature['properties'][
                        self.tonode_fld_name]

                if self.linetype_fld_name in line_feature['properties']:
                    linetype_val = line_feature['properties'][
                        self.linetype_fld_name]
                elif self.linetype_fld_name.lower(
                ) in line_feature['properties']:
                    linetype_val = line_feature['properties'][
                        self.linetype_fld_name.lower()]

                line = shape(line_feature['geometry'])

                # iterate tornado
                for tornado_feature in tornado_dataset:
                    resistivity_probability = 0  # resistivity value at the point of windSpeed
                    random_resistivity = 0  # random resistivity value between 0 and one

                    sim_fld_val = ""
                    ef_fld_val = ""

                    # get EF rating and simulation number column
                    if self.tornado_sim_field_name.lower(
                    ) in tornado_feature['properties']:
                        sim_fld_val = int(tornado_feature['properties'][
                            self.tornado_sim_field_name.lower()])
                    elif self.tornado_sim_field_name in tornado_feature[
                            'properties']:
                        sim_fld_val = int(tornado_feature['properties'][
                            self.tornado_sim_field_name])

                    if self.tornado_ef_field_name.lower(
                    ) in tornado_feature['properties']:
                        ef_fld_val = tornado_feature['properties'][
                            self.tornado_ef_field_name.lower()]
                    elif self.tornado_ef_field_name in tornado_feature[
                            'properties']:
                        ef_fld_val = tornado_feature['properties'][
                            self.tornado_ef_field_name]

                    if sim_fld_val == "" or ef_fld_val == "":
                        print(
                            "unable to convert tornado simulation field value to integer"
                        )
                        sys.exit(0)

                    # get Tornado EF polygon
                    # assumes that the polygon is not a multipolygon
                    poly = shape(tornado_feature['geometry'])
                    poly_list.append(poly)

                    # loop for ef ranges
                    for f in range(self.tornado_ef_rate):
                        npoles = 0  # number of poles in tornado ef box
                        poleresist = 0  # pole's resistance value
                        # setting EF rate value string to match in the tornado dataset's attribute table
                        ef_content = "EF" + str(f)

                        # compute the intersections between link line and ef polygon
                        # also figure out the length of the line that ovelapped with EF box

                        # compute the intersection between tornado polygon and line
                        if sim_fld_val == z and ef_fld_val.lower(
                        ) == ef_content.lower():
                            if poly is not None and line is not None:
                                if poly.intersects(line):
                                    intersection = poly.intersection(line)
                                    any_point = None
                                    intersection_length = intersection.length
                                    if intersection.length > 0:
                                        # print(intersection.__class__.__name__)
                                        # calculate the length of intersected line
                                        # since this is a geographic, it has to be projected to meters to be calcuated
                                        inter_length_meter = GeoUtil.calc_geog_distance_from_linestring(
                                            intersection)
                                        if isinstance(intersection,
                                                      MultiLineString):
                                            intersection_list.append(
                                                intersection)
                                            for inter_line in intersection.geoms:
                                                any_point = inter_line.centroid
                                                break
                                        elif isinstance(
                                                intersection, LineString):
                                            intersection_list.append(
                                                intersection)
                                            any_point = intersection.centroid

                                            # also, random point can be possible
                                            # by changing the following lines value 0.5
                                            # any_point = intersection.interpolate(0.5, normalized=True)

                                    if any_point is not None:
                                        # check if any_point is in the polygon
                                        if poly.contains(any_point) is False:
                                            # this is very hardly happen but should be needed just in case
                                            any_point = poly.centroid

                                    # check if the line is tower or transmission
                                    if linetype_val.lower(
                                    ) == self.line_transmission:
                                        fragility_set_used = fragility_set_tower
                                    else:
                                        fragility_set_used = fragility_set_pole

                                    values_payload = [{
                                        "demands": [
                                            x.lower() for x in
                                            fragility_set_used.demand_types
                                        ],
                                        "units": [
                                            x.lower() for x in
                                            fragility_set_used.demand_units
                                        ],
                                        "loc":
                                        str(any_point.coords[0][1]) + "," +
                                        str(any_point.coords[0][0])
                                    }]

                                    h_vals = self.hazardsvc.post_tornado_hazard_values(
                                        tornado_id, values_payload,
                                        self.get_parameter('seed'))
                                    tor_hazard_values = AnalysisUtil.update_precision_of_lists(
                                        h_vals[0]["hazardValues"])
                                    demand_types = h_vals[0]["demands"]
                                    demand_units = h_vals[0]["units"]
                                    hval_dict = dict()
                                    j = 0
                                    for d in h_vals[0]["demands"]:
                                        hval_dict[d] = tor_hazard_values[j]
                                        j += 1
                                    if isinstance(
                                            fragility_set_used.
                                            fragility_curves[0], DFR3Curve):
                                        inventory_args = fragility_set_used.construct_expression_args_from_inventory(
                                            tornado_feature)
                                        resistivity_probability = \
                                            fragility_set_used.calculate_limit_state(
                                                hval_dict,
                                                inventory_type=fragility_set_used.inventory_type, **inventory_args)
                                    else:
                                        raise ValueError(
                                            "One of the fragilities is in deprecated format. This should not happen. "
                                            "If you are seeing this please report the issue."
                                        )

                                    # randomly generated capacity of each poles ; 1 m/s is 2.23694 mph
                                    poleresist = resistivity_probability.get(
                                        'LS_0') * 2.23694
                                    npoles = int(
                                        round(inter_length_meter /
                                              self.pole_distance))
                                    repairtime_list = []

                                    for k in range(npoles):
                                        repair_time = 0
                                        random_resistivity = random.uniform(
                                            0, 1)

                                        if random_resistivity <= poleresist:
                                            ndamage += 1
                                            # following codes can't be converted from matlab to python
                                            # however, the cross product <=3 or == 24 almost doesn't happen
                                            # since the time and cost differs when it is pole or tower,
                                            # this could be changed by see if it is tower or pole
                                            # if numpy.cross(k, z) <= 3 or numpy.cross(k, z) == 24:
                                            if linetype_val.lower(
                                            ) == self.line_transmission:
                                                mu = self.mut
                                                sigma = self.sigmat
                                                tmu = self.tmut
                                                tsigma = self.tsigmat
                                            else:
                                                mu = self.mud
                                                sigma = self.sigmad
                                                tmu = self.tmud
                                                tsigma = self.tsigmad

                                            repairtime_list.append(
                                                numpy.random.normal(
                                                    tmu, tsigma))

                                    for k in range(ndamage):
                                        repaircost += numpy.random.lognormal(
                                            mu, sigma)

                                    # max of the repair time among different poles is taken
                                    # as the repair time for that line
                                    if len(repairtime_list) > 0:
                                        repairtime = max(repairtime_list)
                noderepair[to_node_val - 1] = repaircost
                nodedam[to_node_val - 1] = ndamage
                nodetimerep[to_node_val - 1] = repairtime
                hazardval[to_node_val - 1] = tor_hazard_values
                demandtypes[to_node_val - 1] = demand_types
                demandunits[to_node_val - 1] = demand_units

            # Calculate damage and repair cost based on network
            for i in range(len(first_node_list)):
                for j in range(len(connection_list[i])):
                    # print(connection_list[i][j], first_node_list[i])
                    pathij = list(
                        nx.all_simple_paths(graph, connection_list[i][j],
                                            first_node_list[i]))
                    poler = 0
                    coster = 0
                    timer = []
                    # print(pathij)
                    if len(pathij) > 0:
                        for k in range(len(pathij)):
                            for var1 in range(len(pathij[k])):
                                poler = poler + nodedam[pathij[k][var1]]
                                coster = coster + noderepair[pathij[k][var1]]
                                # max of the time for different lines is taken as the repair time for that path.
                                # -- path is constituted of different lines.
                                timer.append(nodetimerep[pathij[k][var1]])
                    poles2repair[connection_list[i][j]] = poler
                    cost2repairpath[connection_list[i][j]] = coster
                    if len(timer) > 0:
                        time2repairpath[connection_list[i][j]] = max(timer)
                    else:
                        time2repairpath[connection_list[i][j]] = 0
            totalcost2repair.append(cost2repairpath)
            totalpoles2repair.append(poles2repair)
            totaltime2repair.append(time2repairpath)

        # create guid field from node dataset

        # calculate mean and standard deviation
        meanpoles = numpy.mean(numpy.asarray(totalpoles2repair), axis=0)
        stdpoles = numpy.std(numpy.asarray(totalpoles2repair), axis=0)
        meancost = numpy.mean(numpy.asarray(totalcost2repair), axis=0)
        stdcost = numpy.std(numpy.asarray(totalcost2repair), axis=0)
        meantime = numpy.mean(numpy.asarray(totaltime2repair), axis=0)
        stdtime = numpy.std(numpy.asarray(totaltime2repair), axis=0)

        # create result
        ds_results = []
        damage_results = []

        for i in range(len(meanpoles)):
            ds_result = dict()
            damage_result = dict()

            ds_result['guid'] = guid_list[i]
            ds_result["meanpoles"] = meanpoles[i]
            ds_result["stdpoles"] = stdpoles[i]
            ds_result["meancost"] = meancost[i]
            ds_result["stdcost"] = stdcost[i]
            ds_result["meantime"] = meantime[i]
            ds_result["stdtime"] = stdtime[i]
            ds_result[
                'haz_expose'] = AnalysisUtil.get_exposure_from_hazard_values(
                    hazardval[i], "tornado")

            damage_result['guid'] = guid_list[i]
            damage_result["fragility_tower_id"] = self.fragility_tower_id
            damage_result["fragility_pole_id"] = self.fragility_pole_id
            damage_result["hazardtype"] = "Tornado"
            damage_result['hazardvals'] = hazardval[i]
            damage_result['demandtypes'] = demandtypes[i]
            damage_result['demandunits'] = demandunits[i]

            ds_results.append(ds_result)
            damage_results.append(damage_result)

        return ds_results, damage_results

    """
    align coordinate values in a list as a single pair in order
    """

    def align_list_cooridnate(self, coord_list):
        coord_iterator = iter(coord_list)
        first = prev = next(coord_iterator)
        for coord in coord_iterator:
            yield prev, coord
            prev = coord

            # if it is polygon the following line is needed to close the polygon geometry
            # yield coord, first

    def set_tornado_variables(self, tornado_dataset):
        """Derive simulation count and EF-rate bound from the tornado dataset.

        Sets ``self.nmcs`` to (max simulation number + 1) and
        ``self.tornado_ef_rate`` to (max EF rating + 1).

        Args:
            tornado_dataset: Iterable of tornado features with a
                ``properties`` mapping.

        """
        sim_num_list = []
        ef_rate_list = []

        for ef_poly in tornado_dataset:
            props = ef_poly['properties']
            ef_string = ''
            if self.tornado_sim_field_name.lower() in props:
                sim_num_list.append(
                    int(props[self.tornado_sim_field_name.lower()]))
            elif self.tornado_sim_field_name in props:
                sim_num_list.append(int(props[self.tornado_sim_field_name]))

            if self.tornado_ef_field_name.lower() in props:
                ef_string = props[self.tornado_ef_field_name.lower()]
            elif self.tornado_ef_field_name in props:
                ef_string = props[self.tornado_ef_field_name]

            # parse the number in EF and the format should be "EF0", "EF1", or something like it.
            # Skip features with no EF value: the original unconditionally did
            # split("ef", 1)[1] and raised IndexError before the emptiness
            # check below could ever run (and that check only saw the last
            # feature's value anyway).
            if ef_string:
                ef_rate_list.append(int(ef_string.lower().split("ef", 1)[1]))

        if len(sim_num_list) == 0 or len(ef_rate_list) == 0:
            print("Could not convert tornado simulation value")
            sys.exit(0)

        self.nmcs = max(sim_num_list) + 1
        self.tornado_ef_rate = max(ef_rate_list) + 1

    def set_node_variables(self, node_dataset):
        """Scan the node dataset and record node ids and totals.

        Populates ``self.indpnode`` / ``self.nint``, updates
        ``self.highest_node_num``, and stores the node count in ``self.nnode``.

        Args:
            node_dataset: Iterable of node features with a ``properties``
                mapping.

        """
        node_count = 0

        for node_point in node_dataset:
            props = node_point['properties']
            node_id = None
            indpnode_val = None

            # Prefer the lowercase field name; fall back to the exact name.
            nwid_fld = self.nodenwid_fld_name
            if nwid_fld.lower() in props:
                node_id = int(props[nwid_fld.lower()])
            elif nwid_fld in props:
                node_id = int(props[nwid_fld])

            if self.use_indpnode is True:
                indp_fld = self.indpnode_fld_name
                if indp_fld.lower() in props:
                    indpnode_val = int(props[indp_fld.lower()])
                elif indp_fld in props:
                    indpnode_val = int(props[indp_fld])

            if node_id is None and indpnode_val is None:
                print("problem getting the value")
                sys.exit(1)

            if self.use_indpnode is True:
                # Positive flag => independent node, otherwise interdependent.
                bucket = self.indpnode if indpnode_val > 0 else self.nint
                bucket.append(node_id)
            else:
                self.nint.append(node_id)

            self.highest_node_num = max(self.highest_node_num, node_id)
            node_count += 1

        self.nnode = node_count

    def get_spec(self):
        """Return the specification of the tornado EPN damage analysis.

        Returns:
            obj: A JSON object describing parameters, inputs, and outputs.

        """
        input_parameters = [
            {'id': 'result_name',
             'required': True,
             'description': 'result dataset name',
             'type': str},
            {'id': 'tornado_id',
             'required': True,
             'description': 'Tornado hazard id',
             'type': str},
            {'id': 'seed',
             'required': False,
             'description': 'Initial seed for the tornado hazard value',
             'type': int},
        ]
        input_datasets = [
            {'id': 'epn_node',
             'required': True,
             'description': 'EPN Node',
             'type': ['incore:epnNodeVer1']},
            {'id': 'epn_link',
             'required': True,
             'description': 'EPN Link',
             'type': ['incore:epnLinkeVer1']},
            {'id': 'tornado',
             'required': False,
             'description': 'Tornado Dataset',
             'type': ['incore:tornadoWindfield']},
        ]
        output_datasets = [
            {'id': 'result',
             'parent_type': 'epn_node',
             'description': 'CSV file of damages for electric power network by tornado',
             'type': 'incore:tornadoEPNDamageVer3'},
            {'id': 'metadata',
             'parent_type': 'epn_node',
             'description': 'Json file with information about applied hazard value and fragility',
             'type': 'incore:tornadoEPNDamageSupplement'},
        ]
        return {
            'name': 'tornado-epn-damage',
            'description': 'tornado epn damage analysis',
            'input_parameters': input_parameters,
            'input_datasets': input_datasets,
            'output_datasets': output_datasets,
        }
Example #3
0
class BuildingDamage(BaseAnalysis):
    """Building Damage Analysis calculates the probability of building damage based on
    different hazard type such as earthquake, tsunami, and tornado.

    Args:
        incore_client (IncoreClient): Service authentication.

    """

    def __init__(self, incore_client):
        # Remote service clients for hazard values and DFR3 fragility mappings.
        self.hazardsvc = HazardService(incore_client)
        self.fragilitysvc = FragilityService(incore_client)

        super(BuildingDamage, self).__init__(incore_client)

    def run(self):
        """Executes building damage analysis.

        Reads inputs/parameters, fans the per-building work out over a process
        pool, then registers CSV and JSON result datasets.

        Returns:
            bool: True when the analysis completes.

        """
        # Building dataset
        bldg_set = self.get_input_dataset("buildings").get_inventory_reader()

        # building retrofit strategy (optional CSV mapping guid -> retrofit level)
        retrofit_strategy_dataset = self.get_input_dataset("retrofit_strategy")
        if retrofit_strategy_dataset is not None:
            retrofit_strategy = list(retrofit_strategy_dataset.get_csv_reader())
        else:
            retrofit_strategy = None

        # Get hazard input
        hazard_dataset_id = self.get_parameter("hazard_id")

        # Hazard type of the exposure
        hazard_type = self.get_parameter("hazard_type")

        # Get Fragility key; tsunami defaults to the Mmax (momentum flux) key,
        # all other hazards use the generic default key.
        fragility_key = self.get_parameter("fragility_key")
        if fragility_key is None:
            fragility_key = BuildingUtil.DEFAULT_TSUNAMI_MMAX_FRAGILITY_KEY if hazard_type == 'tsunami' else \
                BuildingUtil.DEFAULT_FRAGILITY_KEY
            self.set_parameter("fragility_key", fragility_key)

        user_defined_cpu = 1

        if not self.get_parameter("num_cpu") is None and self.get_parameter("num_cpu") > 0:
            user_defined_cpu = self.get_parameter("num_cpu")

        num_workers = AnalysisUtil.determine_parallelism_locally(self, len(bldg_set), user_defined_cpu)

        # Split the inventory into roughly equal chunks, one per worker.
        avg_bulk_input_size = int(len(bldg_set) / num_workers)
        inventory_args = []
        count = 0
        inventory_list = list(bldg_set)
        while count < len(inventory_list):
            inventory_args.append(inventory_list[count:count + avg_bulk_input_size])
            count += avg_bulk_input_size

        # repeat(...) broadcasts the scalar arguments to every chunk.
        (ds_results, damage_results) = self.building_damage_concurrent_future(self.building_damage_analysis_bulk_input,
                                                                              num_workers,
                                                                              inventory_args,
                                                                              repeat(retrofit_strategy),
                                                                              repeat(hazard_type),
                                                                              repeat(hazard_dataset_id))

        self.set_result_csv_data("ds_result", ds_results, name=self.get_parameter("result_name"))
        self.set_result_json_data("damage_result",
                                  damage_results,
                                  name=self.get_parameter("result_name") + "_additional_info")

        return True

    def building_damage_concurrent_future(self, function_name, parallelism, *args):
        """Utilizes concurrent.future module.

        Args:
            function_name (function): The function to be parallelized.
            parallelism (int): Number of workers in parallelization.
            *args: All the arguments in order to pass into parameter function_name.

        Returns:
            list: A list of ordered dictionaries with building damage values and other data/metadata.

        """
        output_ds = []
        output_dmg = []
        # executor.map preserves chunk order, so results stay aligned with input.
        with concurrent.futures.ProcessPoolExecutor(max_workers=parallelism) as executor:
            for ret1, ret2 in executor.map(function_name, *args):
                output_ds.extend(ret1)
                output_dmg.extend(ret2)

        return output_ds, output_dmg

    def building_damage_analysis_bulk_input(self, buildings, retrofit_strategy, hazard_type, hazard_dataset_id):
        """Run analysis for multiple buildings.

        Args:
            buildings (list): Multiple buildings from input inventory set.
            retrofit_strategy (list): building guid and its retrofit level 0, 1, 2, etc. This is Optional
            hazard_type (str): Hazard type, either earthquake, tornado, or tsunami.
            hazard_dataset_id (str): An id of the hazard exposure.

        Returns:
            list: A list of ordered dictionaries with building damage values and other data/metadata.

        """

        fragility_key = self.get_parameter("fragility_key")
        # Map each building to its fragility set (if any) via the DFR3 mapping.
        fragility_sets = self.fragilitysvc.match_inventory(self.get_input_dataset("dfr3_mapping_set"), buildings,
                                                           fragility_key, retrofit_strategy)
        # Build one hazard-service payload entry per mapped building.
        values_payload = []
        unmapped_buildings = []
        mapped_buildings = []
        for b in buildings:
            bldg_id = b["id"]
            if bldg_id in fragility_sets:
                location = GeoUtil.get_location(b)
                # Hazard service expects "lat,lon".
                loc = str(location.y) + "," + str(location.x)
                demands = AnalysisUtil.get_hazard_demand_types(b, fragility_sets[bldg_id], hazard_type)
                units = fragility_sets[bldg_id].demand_units
                value = {
                    "demands": demands,
                    "units": units,
                    "loc": loc
                }
                values_payload.append(value)
                mapped_buildings.append(b)
            else:
                unmapped_buildings.append(b)

        # not needed anymore as they are already split into mapped and unmapped
        del buildings

        # Dispatch to the hazard-type-specific service endpoint.
        if hazard_type == 'earthquake':
            hazard_vals = self.hazardsvc.post_earthquake_hazard_values(hazard_dataset_id, values_payload)
        elif hazard_type == 'tornado':
            hazard_vals = self.hazardsvc.post_tornado_hazard_values(hazard_dataset_id, values_payload,
                                                                    self.get_parameter('seed'))
        elif hazard_type == 'tsunami':
            hazard_vals = self.hazardsvc.post_tsunami_hazard_values(hazard_dataset_id, values_payload)
        elif hazard_type == 'hurricane':
            hazard_vals = self.hazardsvc.post_hurricane_hazard_values(hazard_dataset_id, values_payload)
        elif hazard_type == 'flood':
            hazard_vals = self.hazardsvc.post_flood_hazard_values(hazard_dataset_id, values_payload)
        else:
            raise ValueError("The provided hazard type is not supported yet by this analysis")

        ds_results = []
        damage_results = []

        # hazard_vals[i] corresponds to mapped_buildings[i] (same payload order).
        i = 0
        for b in mapped_buildings:
            ds_result = dict()
            damage_result = dict()
            dmg_probability = dict()
            dmg_interval = dict()
            b_id = b["id"]
            selected_fragility_set = fragility_sets[b_id]

            # TODO: Once all fragilities are migrated to new format, we can remove this condition
            if isinstance(selected_fragility_set.fragility_curves[0], DFR3Curve):
                # Supports multiple demand types in same fragility
                b_haz_vals = AnalysisUtil.update_precision_of_lists(hazard_vals[i]["hazardValues"])
                b_demands = hazard_vals[i]["demands"]
                b_units = hazard_vals[i]["units"]

                hval_dict = dict()
                j = 0

                # To calculate damage, use demand type name from fragility that will be used in the expression, instead
                # of using what the hazard service returns. There could be a difference "SA" in DFR3 vs "1.07 SA"
                # from hazard
                for d in selected_fragility_set.demand_types:
                    hval_dict[d] = b_haz_vals[j]
                    j += 1
                # Skip damage calculation when the hazard service reported
                # error values; the result then carries empty dicts.
                if not AnalysisUtil.do_hazard_values_have_errors(hazard_vals[i]["hazardValues"]):
                    building_args = selected_fragility_set.construct_expression_args_from_inventory(b)

                    building_period = selected_fragility_set.fragility_curves[0].get_building_period(
                        selected_fragility_set.curve_parameters, **building_args)

                    dmg_probability = selected_fragility_set.calculate_limit_state(
                        hval_dict, **building_args, period=building_period)
                    dmg_interval = selected_fragility_set.calculate_damage_interval(
                        dmg_probability, hazard_type=hazard_type, inventory_type="building")
            else:
                raise ValueError("One of the fragilities is in deprecated format. This should not happen. If you are "
                                 "seeing this please report the issue.")

            ds_result['guid'] = b['properties']['guid']
            damage_result['guid'] = b['properties']['guid']

            ds_result.update(dmg_probability)
            ds_result.update(dmg_interval)
            ds_result['haz_expose'] = AnalysisUtil.get_exposure_from_hazard_values(b_haz_vals, hazard_type)

            damage_result['fragility_id'] = selected_fragility_set.id
            damage_result['demandtype'] = b_demands
            damage_result['demandunits'] = b_units
            damage_result['hazardval'] = b_haz_vals

            ds_results.append(ds_result)
            damage_results.append(damage_result)
            i += 1

        # Unmapped buildings still appear in the output, with null metadata.
        for b in unmapped_buildings:
            ds_result = dict()
            damage_result = dict()
            ds_result['guid'] = b['properties']['guid']
            damage_result['guid'] = b['properties']['guid']
            damage_result['fragility_id'] = None
            damage_result['demandtype'] = None
            damage_result['demandunits'] = None
            damage_result['hazardval'] = None

            ds_results.append(ds_result)
            damage_results.append(damage_result)

        return ds_results, damage_results

    def get_spec(self):
        """Get specifications of the building damage analysis.

        Returns:
            obj: A JSON object of specifications of the building damage analysis.

        """
        return {
            'name': 'building-damage',
            'description': 'building damage analysis',
            'input_parameters': [
                {
                    'id': 'result_name',
                    'required': True,
                    'description': 'result dataset name',
                    'type': str
                },
                {
                    'id': 'hazard_type',
                    'required': True,
                    'description': 'Hazard Type (e.g. earthquake)',
                    'type': str
                },
                {
                    'id': 'hazard_id',
                    'required': True,
                    'description': 'Hazard ID',
                    'type': str
                },
                {
                    'id': 'fragility_key',
                    'required': False,
                    'description': 'Fragility key to use in mapping dataset',
                    'type': str
                },
                {
                    'id': 'use_liquefaction',
                    'required': False,
                    'description': 'Use liquefaction',
                    'type': bool
                },
                {
                    'id': 'use_hazard_uncertainty',
                    'required': False,
                    'description': 'Use hazard uncertainty',
                    'type': bool
                },
                {
                    'id': 'num_cpu',
                    'required': False,
                    'description': 'If using parallel execution, the number of cpus to request',
                    'type': int
                },
                {
                    'id': 'seed',
                    'required': False,
                    'description': 'Initial seed for the tornado hazard value',
                    'type': int
                }
            ],
            'input_datasets': [
                {
                    'id': 'buildings',
                    'required': True,
                    'description': 'Building Inventory',
                    'type': ['ergo:buildingInventoryVer4', 'ergo:buildingInventoryVer5',
                             'ergo:buildingInventoryVer6', 'ergo:buildingInventoryVer7'],
                },
                {
                    'id': 'dfr3_mapping_set',
                    'required': True,
                    'description': 'DFR3 Mapping Set Object',
                    'type': ['incore:dfr3MappingSet'],
                },
                {
                    'id': 'retrofit_strategy',
                    'required': False,
                    'description': 'Building retrofit strategy that contains guid and retrofit method',
                    'type': ['incore:retrofitStrategy']
                }
            ],
            'output_datasets': [
                {
                    'id': 'ds_result',
                    'parent_type': 'buildings',
                    'description': 'CSV file of damage states for building structural damage',
                    'type': 'ergo:buildingDamageVer6'
                },
                {
                    'id': 'damage_result',
                    'parent_type': 'buildings',
                    'description': 'Json file with information about applied hazard value and fragility',
                    'type': 'incore:buildingDamageSupplement'
                }
            ]
        }
Example #4
0
class EpfDamage(BaseAnalysis):
    """Computes electric power facility structural damage for an earthquake, tsunami, tornado, and hurricane hazards.

    Args:
        incore_client (IncoreClient): Service authentication.

    """

    DEFAULT_LIQ_FRAGILITY_KEY = "pgd"
    DEFAULT_FRAGILITY_KEY = "pga"

    def __init__(self, incore_client):
        """Set up the hazard/fragility service clients and the base analysis.

        Args:
            incore_client (IncoreClient): Service authentication.

        """
        # Clients for the hazard and DFR3 (fragility) web services.
        self.hazardsvc = HazardService(incore_client)
        self.fragilitysvc = FragilityService(incore_client)

        super().__init__(incore_client)

    def run(self):
        """Executes electric power facility damage analysis.

        Reads inputs/parameters, fans the per-facility work out over a process
        pool, then registers CSV and JSON result datasets.

        Returns:
            bool: True when the analysis completes.

        """
        epf_set = self.get_input_dataset("epfs").get_inventory_reader()

        # Get Fragility key; fall back to the class default (pga) when unset.
        fragility_key = self.get_parameter("fragility_key")
        if fragility_key is None:
            fragility_key = self.DEFAULT_FRAGILITY_KEY
            self.set_parameter("fragility_key", fragility_key)

        # Get hazard input
        hazard_dataset_id = self.get_parameter("hazard_id")

        # Hazard type, note this is here for future use if additional hazards are supported by this analysis
        hazard_type = self.get_parameter("hazard_type")

        # Hazard Uncertainty
        use_hazard_uncertainty = False
        if self.get_parameter("use_hazard_uncertainty") is not None:
            use_hazard_uncertainty = self.get_parameter(
                "use_hazard_uncertainty")

        # Fail fast: uncertainty is accepted as a parameter but not supported.
        if use_hazard_uncertainty:
            raise ValueError("Uncertainty is not implemented yet.")

        user_defined_cpu = 1

        if not self.get_parameter("num_cpu") is None and self.get_parameter(
                "num_cpu") > 0:
            user_defined_cpu = self.get_parameter("num_cpu")

        num_workers = AnalysisUtil.determine_parallelism_locally(
            self, len(epf_set), user_defined_cpu)

        # Split the inventory into roughly equal chunks, one per worker.
        avg_bulk_input_size = int(len(epf_set) / num_workers)
        inventory_args = []
        count = 0
        inventory_list = list(epf_set)
        while count < len(inventory_list):
            inventory_args.append(inventory_list[count:count +
                                                 avg_bulk_input_size])
            count += avg_bulk_input_size

        # repeat(...) broadcasts the scalar arguments to every chunk.
        (ds_results, damage_results) = self.epf_damage_concurrent_future(
            self.epf_damage_analysis_bulk_input, num_workers, inventory_args,
            repeat(hazard_type), repeat(hazard_dataset_id))

        self.set_result_csv_data("result",
                                 ds_results,
                                 name=self.get_parameter("result_name"))
        self.set_result_json_data("metadata",
                                  damage_results,
                                  name=self.get_parameter("result_name") +
                                  "_additional_info")

        return True

    def epf_damage_concurrent_future(self, function_name, num_workers, *args):
        """Fan the bulk-input function out over a process pool.

        Args:
            function_name (function): The function to be parallelized.
            num_workers (int): Maximum number workers in parallelization.
            *args: All the arguments in order to pass into parameter function_name.

        Returns:
            list: A list of ordered dictionaries with epf damage values and other data/metadata.

        """
        ds_accumulator = []
        dmg_accumulator = []
        pool = concurrent.futures.ProcessPoolExecutor(max_workers=num_workers)
        with pool as executor:
            # map() preserves input order, keeping results aligned with chunks.
            for ds_part, dmg_part in executor.map(function_name, *args):
                ds_accumulator.extend(ds_part)
                dmg_accumulator.extend(dmg_part)

        return ds_accumulator, dmg_accumulator

    def epf_damage_analysis_bulk_input(self, epfs, hazard_type,
                                       hazard_dataset_id):
        """Run analysis for multiple epfs.

        Args:
            epfs (list): Multiple epfs from input inventory set.
            hazard_type (str): A type of hazard exposure (earthquake, tsunami, tornado, or hurricane).
            hazard_dataset_id (str): An id of the hazard exposure.

        Returns:
            list: A list of ordered dictionaries with epf damage values and other data/metadata.

        """

        use_liquefaction = False
        liquefaction_available = False

        fragility_key = self.get_parameter("fragility_key")

        fragility_set = self.fragilitysvc.match_inventory(
            self.get_input_dataset("dfr3_mapping_set"), epfs, fragility_key)

        if hazard_type == "earthquake":
            liquefaction_fragility_key = self.get_parameter(
                "liquefaction_fragility_key")
            if self.get_parameter("use_liquefaction") is True:
                if liquefaction_fragility_key is None:
                    liquefaction_fragility_key = self.DEFAULT_LIQ_FRAGILITY_KEY

                use_liquefaction = self.get_parameter("use_liquefaction")

                # Obtain the geology dataset
                geology_dataset_id = self.get_parameter(
                    "liquefaction_geology_dataset_id")

                if geology_dataset_id is not None:
                    fragility_sets_liq = self.fragilitysvc.match_inventory(
                        self.get_input_dataset("dfr3_mapping_set"), epfs,
                        liquefaction_fragility_key)

                    if fragility_sets_liq is not None:
                        liquefaction_available = True

        values_payload = []
        values_payload_liq = []
        unmapped_epfs = []
        mapped_epfs = []
        for epf in epfs:
            epf_id = epf["id"]
            if epf_id in fragility_set:
                location = GeoUtil.get_location(epf)
                loc = str(location.y) + "," + str(location.x)
                demands = fragility_set[epf_id].demand_types
                units = fragility_set[epf_id].demand_units
                value = {"demands": demands, "units": units, "loc": loc}
                values_payload.append(value)
                mapped_epfs.append(epf)

                if liquefaction_available and epf["id"] in fragility_sets_liq:
                    fragility_set_liq = fragility_sets_liq[epf["id"]]
                    demands_liq = fragility_set_liq.demand_types
                    units_liq = fragility_set_liq.demand_units
                    value_liq = {
                        "demands": demands_liq,
                        "units": units_liq,
                        "loc": loc
                    }
                    values_payload_liq.append(value_liq)
            else:
                unmapped_epfs.append(epf)

        if hazard_type == 'earthquake':
            hazard_vals = self.hazardsvc.post_earthquake_hazard_values(
                hazard_dataset_id, values_payload)
        elif hazard_type == 'tornado':
            hazard_vals = self.hazardsvc.post_tornado_hazard_values(
                hazard_dataset_id, values_payload)
        elif hazard_type == 'hurricane':
            # TODO: implement hurricane
            raise ValueError('Hurricane hazard has not yet been implemented!')
        elif hazard_type == 'tsunami':
            hazard_vals = self.hazardsvc.post_tsunami_hazard_values(
                hazard_dataset_id, values_payload)
        else:
            raise ValueError("Missing hazard type.")

        liquefaction_resp = None
        if liquefaction_available:
            liquefaction_resp = self.hazardsvc.post_liquefaction_values(
                hazard_dataset_id, geology_dataset_id, values_payload_liq)

        ds_results = []
        damage_results = []

        i = 0
        for epf in mapped_epfs:
            ds_result = dict()
            damage_result = dict()
            selected_fragility_set = fragility_set[epf["id"]]

            if isinstance(selected_fragility_set.fragility_curves[0],
                          DFR3Curve):
                hazard_val = AnalysisUtil.update_precision_of_lists(
                    hazard_vals[i]["hazardValues"])
                input_demand_types = hazard_vals[i]["demands"]
                input_demand_units = hazard_vals[i]["units"]

                hval_dict = dict()
                j = 0
                for d in selected_fragility_set.demand_types:
                    hval_dict[d] = hazard_val[j]
                    j += 1

                epf_args = selected_fragility_set.construct_expression_args_from_inventory(
                    epf)
                limit_states = selected_fragility_set.calculate_limit_state(
                    hval_dict, inventory_type='electric_facility', **epf_args)

                if liquefaction_resp is not None:
                    fragility_set_liq = fragility_sets_liq[epf["id"]]

                    if isinstance(fragility_set_liq.fragility_curves[0],
                                  DFR3Curve):
                        liq_hazard_vals = AnalysisUtil.update_precision_of_lists(
                            liquefaction_resp[i]["pgdValues"])
                        liq_demand_types = liquefaction_resp[i]["demands"]
                        liq_demand_units = liquefaction_resp[i]["units"]
                        liquefaction_prob = liquefaction_resp[i][
                            'liqProbability']

                        hval_dict_liq = dict()

                        for j, d in enumerate(fragility_set_liq.demand_types):
                            hval_dict_liq[d] = liq_hazard_vals[j]

                        facility_liq_args = fragility_set_liq.construct_expression_args_from_inventory(
                            epf)
                        pgd_limit_states = \
                            fragility_set_liq.calculate_limit_state(
                                hval_dict_liq, inventory_type="electric_facility",
                                **facility_liq_args)
                    else:
                        raise ValueError(
                            "One of the fragilities is in deprecated format. "
                            "This should not happen If you are seeing this please report the issue."
                        )

                    limit_states = AnalysisUtil.adjust_limit_states_for_pgd(
                        limit_states, pgd_limit_states)

                dmg_interval = selected_fragility_set.calculate_damage_interval(
                    limit_states,
                    hazard_type=hazard_type,
                    inventory_type='electric_facility')
            else:
                raise ValueError(
                    "One of the fragilities is in deprecated format. This should not happen. If you are "
                    "seeing this please report the issue.")

            ds_result["guid"] = epf["properties"]["guid"]
            ds_result.update(limit_states)
            ds_result.update(dmg_interval)
            ds_result[
                'haz_expose'] = AnalysisUtil.get_exposure_from_hazard_values(
                    hazard_val, hazard_type)

            damage_result['guid'] = epf['properties']['guid']
            damage_result['fragility_id'] = selected_fragility_set.id
            damage_result["demandtypes"] = input_demand_types
            damage_result["demandunits"] = input_demand_units
            damage_result["hazardtype"] = hazard_type
            damage_result["hazardvals"] = hazard_val

            if hazard_type == "earthquake" and use_liquefaction is True:
                if liquefaction_available:
                    damage_result['liq_fragility_id'] = fragility_sets_liq[
                        epf["id"]].id
                    damage_result['liqdemandtypes'] = liq_demand_types
                    damage_result['liqdemandunits'] = liq_demand_units
                    damage_result['liqhazval'] = liq_hazard_vals
                    damage_result['liqprobability'] = liquefaction_prob
                else:
                    damage_result['liq_fragility_id'] = None
                    damage_result['liqdemandtypes'] = None
                    damage_result['liqdemandunits'] = None
                    damage_result['liqhazval'] = None
                    damage_result['liqprobability'] = None

            ds_results.append(ds_result)
            damage_results.append(damage_result)

            i += 1

        #############################################################

        # unmapped
        for epf in unmapped_epfs:
            ds_result = dict()
            damage_result = dict()
            ds_result['guid'] = epf['properties']['guid']
            damage_result['guid'] = epf['properties']['guid']
            damage_result['fragility_id'] = None
            damage_result["demandtypes"] = None
            damage_result['demandunits'] = None
            damage_result["hazardtype"] = None
            damage_result['hazardval'] = None
            if hazard_type == "earthquake" and use_liquefaction is True:
                damage_result['liq_fragility_id'] = None
                damage_result['liqdemandtypes'] = None
                damage_result['liqdemandunits'] = None
                damage_result['liqhazval'] = None
                damage_result['liqprobability'] = None

            ds_results.append(ds_result)
            damage_results.append(damage_result)

        return ds_results, damage_results

    def get_spec(self):
        """Get specifications of the EPF damage analysis.

        The returned structure is consumed by the BaseAnalysis framework to
        validate input parameters/datasets and to register output datasets.

        Returns:
            obj: A JSON object of specifications of the epf damage analysis.

        """
        return {
            'name': 'epf-damage',
            'description': 'Electric Power Facility damage analysis.',
            'input_parameters': [
                {
                    'id': 'result_name',
                    'required': True,
                    'description': 'A name of the resulting dataset',
                    'type': str
                },
                {
                    'id': 'hazard_type',
                    'required': True,
                    'description': 'Hazard type (e.g. earthquake).',
                    'type': str
                },
                {
                    'id': 'hazard_id',
                    'required': True,
                    'description':
                    'Hazard ID which defines the particular hazard (e.g. New madrid earthquake '
                    'using Atkinson Boore 1995).',
                    'type': str
                },
                {
                    'id': 'fragility_key',
                    'required': False,
                    'description':
                    'Fragility key to use in mapping dataset',
                    'type': str
                },
                {
                    'id': 'liquefaction_fragility_key',
                    'required': False,
                    'description':
                    'Fragility key to use in liquefaction mapping dataset',
                    'type': str
                },
                {
                    'id': 'use_liquefaction',
                    'required': False,
                    'description':
                    'Use ground liquefaction to modify damage interval.',
                    'type': bool
                },
                {
                    'id': 'liquefaction_geology_dataset_id',
                    'required': False,
                    'description':
                    'Liquefaction geology/susceptibility dataset id. '
                    'If not provided, liquefaction will be ignored',
                    'type': str
                },
                {
                    'id': 'use_hazard_uncertainty',
                    'required': False,
                    'description': 'Use hazard uncertainty',
                    'type': bool
                },
                {
                    'id': 'num_cpu',
                    'required': False,
                    'description':
                    'If using parallel execution, the number of cpus to request.',
                    'type': int
                },
            ],
            'input_datasets': [{
                'id': 'epfs',
                'required': True,
                'description': 'Electric Power Facility Inventory',
                'type': ['incore:epf', 'ergo:epf'],
            }, {
                'id': 'dfr3_mapping_set',
                'required': True,
                'description': 'DFR3 Mapping Set Object',
                'type': ['incore:dfr3MappingSet'],
            }],
            'output_datasets': [{
                'id': 'result',
                'parent_type': 'epfs',
                'type': 'incore:epfDamageVer3'
            }, {
                'id': 'metadata',
                'parent_type': 'epfs',
                'description':
                'additional metadata in json file about applied hazard value and '
                'fragility',
                'type': 'incore:epfDamageSupplement'
            }]
        }