def waterfacilityset_damage_analysis_bulk_input(self, facilities,
                                                    hazard_type,
                                                    hazard_dataset_id):
        """Gets applicable fragilities and calculates damage

        Args:
            facilities (list): Multiple water facilities from input inventory set.
            hazard_type (str): A hazard type of the hazard exposure (earthquake, tsunami, tornado, or hurricane).
            hazard_dataset_id (str): An id of the hazard exposure.

        Returns:
            list: A list of ordered dictionaries with water facility damage values
            list: A list of ordered dictionaries with other water facility data/metadata

        Raises:
            ValueError: If hazard_type is neither 'earthquake' nor 'tsunami',
                or if a mapped fragility is in a deprecated (non-DFR3Curve)
                format.
        """

        # Liquefaction related variables.
        # NOTE(review): several of these (liq_hazard_vals, liq_demand_types,
        # liq_demand_units, liquefaction_prob, loc) are reassigned inside the
        # per-facility loops below and read again later; a facility that skips
        # the liquefaction branch sees values left over from a previous
        # iteration -- confirm this is intended.
        use_liquefaction = False
        liquefaction_available = False
        fragility_sets_liq = None
        liquefaction_resp = None
        geology_dataset_id = None
        liq_hazard_vals = None
        liq_demand_types = None
        liq_demand_units = None
        liquefaction_prob = None
        loc = None

        # Obtain the fragility key; fall back to the hazard-specific default
        # and persist it as a parameter so downstream steps see the same key.
        fragility_key = self.get_parameter("fragility_key")

        if fragility_key is None:
            if hazard_type == 'tsunami':
                fragility_key = self.DEFAULT_TSU_FRAGILITY_KEY
            elif hazard_type == 'earthquake':
                fragility_key = self.DEFAULT_EQ_FRAGILITY_KEY
            else:
                raise ValueError(
                    "Hazard type other than Earthquake and Tsunami are not currently supported."
                )

            self.set_parameter("fragility_key", fragility_key)

        # Obtain the fragility set: a dict of fragility sets keyed by
        # facility id (only mapped facilities appear as keys).
        fragility_sets = self.fragilitysvc.match_inventory(
            self.get_input_dataset("dfr3_mapping_set"), facilities,
            fragility_key)

        # Obtain the liquefaction fragility Key
        liquefaction_fragility_key = self.get_parameter(
            "liquefaction_fragility_key")

        # Liquefaction only applies to earthquakes and requires both the
        # use_liquefaction flag and a geology dataset id.
        if hazard_type == "earthquake":
            if self.get_parameter("use_liquefaction") is True:
                if liquefaction_fragility_key is None:
                    liquefaction_fragility_key = self.DEFAULT_LIQ_FRAGILITY_KEY

                use_liquefaction = self.get_parameter("use_liquefaction")

                # Obtain the geology dataset
                geology_dataset_id = self.get_parameter(
                    "liquefaction_geology_dataset_id")

                if geology_dataset_id is not None:
                    fragility_sets_liq = self.fragilitysvc.match_inventory(
                        self.get_input_dataset("dfr3_mapping_set"), facilities,
                        liquefaction_fragility_key)

                    if fragility_sets_liq is not None:
                        liquefaction_available = True

        # Determine whether to use hazard uncertainty
        uncertainty = self.get_parameter("use_hazard_uncertainty")

        # Setup fragility translation structures
        values_payload = []
        values_payload_liq = []
        unmapped_waterfacilities = []
        mapped_waterfacilities = []

        # Build the hazard-service payloads: one entry per mapped facility
        # carrying its fragility's demand types/units and its "lat,lon" string.
        for facility in facilities:
            if facility["id"] in fragility_sets.keys():
                # Fill in generic details
                fragility_set = fragility_sets[facility["id"]]
                location = GeoUtil.get_location(facility)
                loc = str(location.y) + "," + str(location.x)
                demands = fragility_set.demand_types
                units = fragility_set.demand_units
                value = {"demands": demands, "units": units, "loc": loc}
                values_payload.append(value)
                mapped_waterfacilities.append(facility)

                # Fill in liquefaction parameters
                if liquefaction_available and facility[
                        "id"] in fragility_sets_liq:
                    fragility_set_liq = fragility_sets_liq[facility["id"]]
                    demands_liq = fragility_set_liq.demand_types
                    units_liq = fragility_set_liq.demand_units
                    value_liq = {
                        "demands": demands_liq,
                        "units": units_liq,
                        "loc": loc
                    }
                    values_payload_liq.append(value_liq)
            else:
                unmapped_waterfacilities.append(facility)

        # The original list is no longer needed; only the mapped/unmapped
        # partitions are used from here on.
        del facilities

        if hazard_type == 'earthquake':
            hazard_resp = self.hazardsvc.post_earthquake_hazard_values(
                hazard_dataset_id, values_payload)
        elif hazard_type == 'tsunami':
            hazard_resp = self.hazardsvc.post_tsunami_hazard_values(
                hazard_dataset_id, values_payload)
        else:
            raise ValueError(
                "The provided hazard type is not supported yet by this analysis"
            )

        # Check if liquefaction is applicable
        if liquefaction_available:
            liquefaction_resp = self.hazardsvc.post_liquefaction_values(
                hazard_dataset_id, geology_dataset_id, values_payload_liq)

        # Calculate LS and DS
        facility_results = []
        damage_results = []

        # NOTE(review): liquefaction_resp is indexed below with i (the
        # position in mapped_waterfacilities), but values_payload_liq only has
        # entries for facilities that had a liquefaction fragility; if any
        # mapped facility lacks one, the indices drift out of alignment and
        # fragility_sets_liq[facility["id"]] can raise KeyError -- confirm the
        # payloads are always one-to-one.
        for i, facility in enumerate(mapped_waterfacilities):
            fragility_set = fragility_sets[facility["id"]]
            limit_states = dict()
            dmg_intervals = dict()

            # Setup conditions for the analysis
            hazard_std_dev = 0

            if uncertainty:
                hazard_std_dev = random.random()

            if isinstance(fragility_set.fragility_curves[0], DFR3Curve):
                hazard_vals = AnalysisUtil.update_precision_of_lists(
                    hazard_resp[i]["hazardValues"])
                demand_types = hazard_resp[i]["demands"]
                demand_units = hazard_resp[i]["units"]

                # Map each fragility demand type to its hazard value; relies
                # on the service returning values in the requested order.
                hval_dict = dict()

                for j, d in enumerate(fragility_set.demand_types):
                    hval_dict[d] = hazard_vals[j]

                if not AnalysisUtil.do_hazard_values_have_errors(
                        hazard_resp[i]["hazardValues"]):
                    facility_args = fragility_set.construct_expression_args_from_inventory(
                        facility)
                    limit_states = \
                        fragility_set.calculate_limit_state(hval_dict,
                                                            std_dev=hazard_std_dev,
                                                            inventory_type='water_facility',
                                                            **facility_args)
                    # Evaluate liquefaction: if it is not none, then liquefaction is available
                    if liquefaction_resp is not None:
                        fragility_set_liq = fragility_sets_liq[facility["id"]]

                        if isinstance(fragility_set_liq.fragility_curves[0],
                                      DFR3Curve):
                            liq_hazard_vals = AnalysisUtil.update_precision_of_lists(
                                liquefaction_resp[i]["pgdValues"])
                            liq_demand_types = liquefaction_resp[i]["demands"]
                            liq_demand_units = liquefaction_resp[i]["units"]
                            liquefaction_prob = liquefaction_resp[i][
                                'liqProbability']

                            hval_dict_liq = dict()

                            for j, d in enumerate(
                                    fragility_set_liq.demand_types):
                                hval_dict_liq[d] = liq_hazard_vals[j]

                            facility_liq_args = fragility_set_liq.construct_expression_args_from_inventory(
                                facility)
                            pgd_limit_states = \
                                fragility_set_liq.calculate_limit_state(
                                    hval_dict_liq, std_dev=hazard_std_dev, inventory_type="water_facility",
                                    **facility_liq_args)
                        else:
                            raise ValueError(
                                "One of the fragilities is in deprecated format. "
                                "This should not happen If you are seeing this please report the issue."
                            )

                        # Combine ground-shaking limit states with the PGD
                        # (liquefaction) limit states.
                        limit_states = AnalysisUtil.adjust_limit_states_for_pgd(
                            limit_states, pgd_limit_states)

                    dmg_intervals = fragility_set.calculate_damage_interval(
                        limit_states,
                        hazard_type=hazard_type,
                        inventory_type='water_facility')
            else:
                raise ValueError(
                    "One of the fragilities is in deprecated format. This should not happen. If you are "
                    "seeing this please report the issue.")

            # TODO: ideally, this goes into a single variable declaration section

            facility_result = {
                'guid': facility['properties']['guid'],
                **limit_states,
                **dmg_intervals
            }
            facility_result[
                'haz_expose'] = AnalysisUtil.get_exposure_from_hazard_values(
                    hazard_vals, hazard_type)
            damage_result = dict()
            damage_result['guid'] = facility['properties']['guid']
            damage_result['fragility_id'] = fragility_set.id
            damage_result['demandtypes'] = demand_types
            damage_result['demandunits'] = demand_units
            damage_result['hazardtype'] = hazard_type
            damage_result['hazardvals'] = hazard_vals

            # NOTE(review): the liq_* trackers read here may still hold values
            # from an earlier facility if this one did not pass through the
            # liquefaction branch above -- verify.
            if use_liquefaction and fragility_sets_liq and geology_dataset_id:
                damage_result['liq_fragility_id'] = fragility_sets_liq[
                    facility["id"]].id
                damage_result['liqdemandtypes'] = liq_demand_types
                damage_result['liqdemandunits'] = liq_demand_units
                damage_result['liqhazval'] = liq_hazard_vals
                damage_result['liqprobability'] = liquefaction_prob
            else:
                damage_result['liq_fragility_id'] = None
                damage_result['liqdemandtypes'] = None
                damage_result['liqdemandunits'] = None
                damage_result['liqhazval'] = None
                damage_result['liqprobability'] = None

            facility_results.append(facility_result)
            damage_results.append(damage_result)

        # Facilities with no fragility mapping get placeholder (None) entries
        # so the output covers the full inventory.
        for facility in unmapped_waterfacilities:
            facility_result = dict()
            damage_result = dict()
            facility_result['guid'] = facility['properties']['guid']
            damage_result['guid'] = facility['properties']['guid']
            damage_result['fragility_id'] = None
            damage_result['demandtypes'] = None
            damage_result['demandunits'] = None
            damage_result['hazardtype'] = None
            damage_result['hazardvals'] = None
            damage_result['liq_fragility_id'] = None
            damage_result['liqdemandtypes'] = None
            damage_result['liqdemandunits'] = None
            damage_result['liqhazval'] = None
            damage_result['liqprobability'] = None

            facility_results.append(facility_result)
            damage_results.append(damage_result)

        return facility_results, damage_results
# ---- Beispiel #2 (scraped-example separator; commented out so the file stays parseable) ----
    def epf_damage_analysis_bulk_input(self, epfs, hazard_type,
                                       hazard_dataset_id):
        """Run analysis for multiple epfs.

        Args:
            epfs (list): Multiple epfs from input inventory set.
            hazard_type (str): A type of hazard exposure (earthquake, tsunami, tornado, or hurricane).
            hazard_dataset_id (str): An id of the hazard exposure.

        Returns:
            list: A list of ordered dictionaries with epf damage values and other data/metadata.

        Raises:
            ValueError: If the hazard type is hurricane (unimplemented) or
                unknown, or if a mapped fragility is in a deprecated
                (non-DFR3Curve) format.
        """

        use_liquefaction = False
        liquefaction_available = False

        fragility_key = self.get_parameter("fragility_key")

        # Despite the singular name, fragility_set is a dict of fragility
        # sets keyed by epf id (only mapped epfs appear as keys).
        fragility_set = self.fragilitysvc.match_inventory(
            self.get_input_dataset("dfr3_mapping_set"), epfs, fragility_key)

        # Liquefaction applies only to earthquakes and requires both the
        # use_liquefaction flag and a geology dataset id.
        if hazard_type == "earthquake":
            liquefaction_fragility_key = self.get_parameter(
                "liquefaction_fragility_key")
            if self.get_parameter("use_liquefaction") is True:
                if liquefaction_fragility_key is None:
                    liquefaction_fragility_key = self.DEFAULT_LIQ_FRAGILITY_KEY

                use_liquefaction = self.get_parameter("use_liquefaction")

                # Obtain the geology dataset
                geology_dataset_id = self.get_parameter(
                    "liquefaction_geology_dataset_id")

                if geology_dataset_id is not None:
                    fragility_sets_liq = self.fragilitysvc.match_inventory(
                        self.get_input_dataset("dfr3_mapping_set"), epfs,
                        liquefaction_fragility_key)

                    if fragility_sets_liq is not None:
                        liquefaction_available = True

        # Build the hazard-service payloads: one entry per mapped epf with
        # its fragility's demand types/units and a "lat,lon" location string.
        values_payload = []
        values_payload_liq = []
        unmapped_epfs = []
        mapped_epfs = []
        for epf in epfs:
            epf_id = epf["id"]
            if epf_id in fragility_set:
                location = GeoUtil.get_location(epf)
                loc = str(location.y) + "," + str(location.x)
                demands = fragility_set[epf_id].demand_types
                units = fragility_set[epf_id].demand_units
                value = {"demands": demands, "units": units, "loc": loc}
                values_payload.append(value)
                mapped_epfs.append(epf)

                if liquefaction_available and epf["id"] in fragility_sets_liq:
                    fragility_set_liq = fragility_sets_liq[epf["id"]]
                    demands_liq = fragility_set_liq.demand_types
                    units_liq = fragility_set_liq.demand_units
                    value_liq = {
                        "demands": demands_liq,
                        "units": units_liq,
                        "loc": loc
                    }
                    values_payload_liq.append(value_liq)
            else:
                unmapped_epfs.append(epf)

        if hazard_type == 'earthquake':
            hazard_vals = self.hazardsvc.post_earthquake_hazard_values(
                hazard_dataset_id, values_payload)
        elif hazard_type == 'tornado':
            hazard_vals = self.hazardsvc.post_tornado_hazard_values(
                hazard_dataset_id, values_payload)
        elif hazard_type == 'hurricane':
            # TODO: implement hurricane
            raise ValueError('Hurricane hazard has not yet been implemented!')
        elif hazard_type == 'tsunami':
            hazard_vals = self.hazardsvc.post_tsunami_hazard_values(
                hazard_dataset_id, values_payload)
        else:
            raise ValueError("Missing hazard type.")

        liquefaction_resp = None
        if liquefaction_available:
            liquefaction_resp = self.hazardsvc.post_liquefaction_values(
                hazard_dataset_id, geology_dataset_id, values_payload_liq)

        ds_results = []
        damage_results = []

        # NOTE(review): liquefaction_resp is indexed with i (the position in
        # mapped_epfs), but values_payload_liq only has entries for epfs that
        # had a liquefaction fragility; if any mapped epf lacks one, the
        # indices drift out of alignment -- confirm the payloads are always
        # one-to-one.
        i = 0
        for epf in mapped_epfs:
            ds_result = dict()
            damage_result = dict()
            selected_fragility_set = fragility_set[epf["id"]]

            if isinstance(selected_fragility_set.fragility_curves[0],
                          DFR3Curve):
                hazard_val = AnalysisUtil.update_precision_of_lists(
                    hazard_vals[i]["hazardValues"])
                input_demand_types = hazard_vals[i]["demands"]
                input_demand_units = hazard_vals[i]["units"]

                # Map each fragility demand type to its hazard value; relies
                # on the service returning values in the requested order.
                hval_dict = dict()
                j = 0
                for d in selected_fragility_set.demand_types:
                    hval_dict[d] = hazard_val[j]
                    j += 1

                epf_args = selected_fragility_set.construct_expression_args_from_inventory(
                    epf)
                limit_states = selected_fragility_set.calculate_limit_state(
                    hval_dict, inventory_type='electric_facility', **epf_args)

                if liquefaction_resp is not None:
                    fragility_set_liq = fragility_sets_liq[epf["id"]]

                    if isinstance(fragility_set_liq.fragility_curves[0],
                                  DFR3Curve):
                        liq_hazard_vals = AnalysisUtil.update_precision_of_lists(
                            liquefaction_resp[i]["pgdValues"])
                        liq_demand_types = liquefaction_resp[i]["demands"]
                        liq_demand_units = liquefaction_resp[i]["units"]
                        liquefaction_prob = liquefaction_resp[i][
                            'liqProbability']

                        hval_dict_liq = dict()

                        for j, d in enumerate(fragility_set_liq.demand_types):
                            hval_dict_liq[d] = liq_hazard_vals[j]

                        facility_liq_args = fragility_set_liq.construct_expression_args_from_inventory(
                            epf)
                        pgd_limit_states = \
                            fragility_set_liq.calculate_limit_state(
                                hval_dict_liq, inventory_type="electric_facility",
                                **facility_liq_args)
                    else:
                        raise ValueError(
                            "One of the fragilities is in deprecated format. "
                            "This should not happen If you are seeing this please report the issue."
                        )

                    # Combine ground-shaking limit states with the PGD
                    # (liquefaction) limit states.
                    limit_states = AnalysisUtil.adjust_limit_states_for_pgd(
                        limit_states, pgd_limit_states)

                dmg_interval = selected_fragility_set.calculate_damage_interval(
                    limit_states,
                    hazard_type=hazard_type,
                    inventory_type='electric_facility')
            else:
                raise ValueError(
                    "One of the fragilities is in deprecated format. This should not happen. If you are "
                    "seeing this please report the issue.")

            ds_result["guid"] = epf["properties"]["guid"]
            ds_result.update(limit_states)
            ds_result.update(dmg_interval)
            ds_result[
                'haz_expose'] = AnalysisUtil.get_exposure_from_hazard_values(
                    hazard_val, hazard_type)

            damage_result['guid'] = epf['properties']['guid']
            damage_result['fragility_id'] = selected_fragility_set.id
            damage_result["demandtypes"] = input_demand_types
            damage_result["demandunits"] = input_demand_units
            damage_result["hazardtype"] = hazard_type
            damage_result["hazardvals"] = hazard_val

            # Liquefaction metadata is only included for earthquake runs that
            # requested liquefaction; other hazard types omit these keys.
            if hazard_type == "earthquake" and use_liquefaction is True:
                if liquefaction_available:
                    damage_result['liq_fragility_id'] = fragility_sets_liq[
                        epf["id"]].id
                    damage_result['liqdemandtypes'] = liq_demand_types
                    damage_result['liqdemandunits'] = liq_demand_units
                    damage_result['liqhazval'] = liq_hazard_vals
                    damage_result['liqprobability'] = liquefaction_prob
                else:
                    damage_result['liq_fragility_id'] = None
                    damage_result['liqdemandtypes'] = None
                    damage_result['liqdemandunits'] = None
                    damage_result['liqhazval'] = None
                    damage_result['liqprobability'] = None

            ds_results.append(ds_result)
            damage_results.append(damage_result)

            i += 1

        #############################################################

        # unmapped epfs get placeholder (None) entries so the output covers
        # the full inventory
        for epf in unmapped_epfs:
            ds_result = dict()
            damage_result = dict()
            ds_result['guid'] = epf['properties']['guid']
            damage_result['guid'] = epf['properties']['guid']
            damage_result['fragility_id'] = None
            damage_result["demandtypes"] = None
            damage_result['demandunits'] = None
            damage_result["hazardtype"] = None
            # NOTE(review): mapped results use the key 'hazardvals' (plural);
            # this singular 'hazardval' looks inconsistent -- confirm which
            # key downstream consumers expect before changing it.
            damage_result['hazardval'] = None
            if hazard_type == "earthquake" and use_liquefaction is True:
                damage_result['liq_fragility_id'] = None
                damage_result['liqdemandtypes'] = None
                damage_result['liqdemandunits'] = None
                damage_result['liqhazval'] = None
                damage_result['liqprobability'] = None

            ds_results.append(ds_result)
            damage_results.append(damage_result)

        return ds_results, damage_results
# ---- Beispiel #3 (scraped-example separator; commented out so the file stays parseable) ----
    def epf_damage_analysis_bulk_input(self, epfs, hazard_type,
                                       hazard_dataset_id,
                                       use_hazard_uncertainty,
                                       use_liquefaction,
                                       liq_geology_dataset_id):
        """Run analysis for multiple epfs.

        Args:
            epfs (list): Multiple epfs from input inventory set.
            hazard_type (str): A type of hazard exposure (earthquake, tsunami, tornado, or hurricane).
            hazard_dataset_id (str): An id of the hazard exposure.
            use_hazard_uncertainty (bool): Hazard uncertainty. True for using uncertainty when computing damage,
                False otherwise.
            use_liquefaction (bool): Liquefaction. True for using liquefaction information to modify the damage,
                False otherwise.
            liq_geology_dataset_id (str): A dataset id of the geology dataset used for liquefaction.

        Returns:
            list: A list of ordered dictionaries with epf damage values and other data/metadata.

        Raises:
            ValueError: For hurricane (unimplemented) or unknown hazard types,
                or when hazard uncertainty is requested (not implemented).
        """
        fragility_key = self.get_parameter("fragility_key")

        # Dict of fragility sets keyed by epf id (only mapped epfs appear)
        fragility_set = self.fragilitysvc.match_inventory(
            self.get_input_dataset("dfr3_mapping_set"), epfs, fragility_key)
        epf_results = []

        # Converting list of epfs into a dictionary for ease of reference
        list_epfs = epfs
        epfs = dict()
        for epf in list_epfs:
            epfs[epf["id"]] = epf
        del list_epfs  # Clear as it's not needed anymore

        processed_epf = []
        grouped_epfs = AnalysisUtil.group_by_demand_type(epfs, fragility_set)
        for demand, grouped_epf_items in grouped_epfs.items():
            input_demand_type = demand[0]
            input_demand_units = demand[1]

            # For every group of unique demand and demand unit, call the end-point once
            epf_chunks = list(AnalysisUtil.chunks(grouped_epf_items, 50))
            for epf_chunk in epf_chunks:
                points = []
                for epf_id in epf_chunk:
                    location = GeoUtil.get_location(epfs[epf_id])
                    points.append(str(location.y) + "," + str(location.x))

                if hazard_type == 'earthquake':
                    hazard_vals = self.hazardsvc.get_earthquake_hazard_values(
                        hazard_dataset_id, input_demand_type,
                        input_demand_units, points)
                elif hazard_type == 'tornado':
                    hazard_vals = self.hazardsvc.get_tornado_hazard_values(
                        hazard_dataset_id, input_demand_units, points)
                elif hazard_type == 'hurricane':
                    # TODO: implement hurricane
                    raise ValueError(
                        'Hurricane hazard has not yet been implemented!')

                elif hazard_type == 'tsunami':
                    hazard_vals = self.hazardsvc.get_tsunami_hazard_values(
                        hazard_dataset_id, input_demand_type,
                        input_demand_units, points)
                else:
                    raise ValueError("Missing hazard type.")

                # Parse the batch hazard value results and map them back to the building and fragility.
                # This is a potential pitfall as we are relying on the order of the returned results
                i = 0
                for epf_id in epf_chunk:
                    epf_result = collections.OrderedDict()
                    epf = epfs[epf_id]
                    hazard_val = hazard_vals[i]['hazardValue']

                    # Sometimes the geotiffs give large negative values for out of bounds instead of 0
                    if hazard_val <= 0.0:
                        hazard_val = 0.0

                    std_dev = 0.0
                    if use_hazard_uncertainty:
                        raise ValueError("Uncertainty Not Implemented!")

                    selected_fragility_set = fragility_set[epf_id]
                    limit_states = selected_fragility_set.calculate_limit_state(
                        hazard_val, std_dev=std_dev)
                    dmg_interval = AnalysisUtil.calculate_damage_interval(
                        limit_states)

                    epf_result['guid'] = epf['properties']['guid']
                    epf_result.update(limit_states)
                    epf_result.update(dmg_interval)
                    epf_result['demandtype'] = input_demand_type
                    epf_result['demandunits'] = input_demand_units
                    epf_result['hazardtype'] = hazard_type
                    epf_result['hazardval'] = hazard_val

                    epf_results.append(epf_result)
                    processed_epf.append(epf_id)
                    i = i + 1

        # when there is liquefaction, limit state need to be modified
        if hazard_type == 'earthquake' and use_liquefaction and liq_geology_dataset_id is not None:
            liq_fragility_key = self.get_parameter(
                "liquefaction_fragility_key")
            if liq_fragility_key is None:
                liq_fragility_key = self.DEFAULT_LIQ_FRAGILITY_KEY
            liq_fragility_set = self.fragilitysvc.match_inventory(
                self.get_input_dataset("dfr3_mapping_set"), epfs,
                liq_fragility_key)
            grouped_liq_epfs = AnalysisUtil.group_by_demand_type(
                epfs, liq_fragility_set)

            for liq_demand, grouped_liq_epf_items in grouped_liq_epfs.items():
                liq_input_demand_type = liq_demand[0]
                liq_input_demand_units = liq_demand[1]

                # For every group of unique demand and demand unit, call the end-point once
                liq_epf_chunks = list(
                    AnalysisUtil.chunks(grouped_liq_epf_items, 50))
                for liq_epf_chunk in liq_epf_chunks:
                    points = []
                    for liq_epf_id in liq_epf_chunk:
                        location = GeoUtil.get_location(epfs[liq_epf_id])
                        points.append(str(location.y) + "," + str(location.x))
                    liquefaction_vals = self.hazardsvc.get_liquefaction_values(
                        hazard_dataset_id, liq_geology_dataset_id,
                        liq_input_demand_units, points)

                    # Parse the batch hazard value results and map them back to the building and fragility.
                    # This is a potential pitfall as we are relying on the order of the returned results
                    i = 0
                    for liq_epf_id in liq_epf_chunk:
                        liq_hazard_val = liquefaction_vals[i][
                            liq_input_demand_type]

                        std_dev = 0.0
                        if use_hazard_uncertainty:
                            raise ValueError("Uncertainty Not Implemented!")

                        liquefaction_prob = liquefaction_vals[i][
                            'liqProbability']

                        selected_liq_fragility = liq_fragility_set[liq_epf_id]
                        pgd_limit_states = selected_liq_fragility.calculate_limit_state(
                            liq_hazard_val, std_dev=std_dev)

                        # match id and add liqhaztype, liqhazval, liqprobability field as well as rewrite limit
                        # states and dmg_interval
                        for epf_result in epf_results:
                            # Fixed: guids are stored under 'properties' (as
                            # everywhere else in this file); the previous
                            # top-level epfs[liq_epf_id]['guid'] lookup could
                            # never match the results built above.
                            if epf_result['guid'] == epfs[liq_epf_id][
                                    'properties']['guid']:
                                limit_states = {
                                    "ls-slight": epf_result['ls-slight'],
                                    "ls-moderat": epf_result['ls-moderat'],
                                    "ls-extensi": epf_result['ls-extensi'],
                                    "ls-complet": epf_result['ls-complet']
                                }
                                liq_limit_states = AnalysisUtil.adjust_limit_states_for_pgd(
                                    limit_states, pgd_limit_states)
                                liq_dmg_interval = AnalysisUtil.calculate_damage_interval(
                                    liq_limit_states)
                                epf_result.update(liq_limit_states)
                                epf_result.update(liq_dmg_interval)
                                epf_result[
                                    'liqhaztype'] = liq_input_demand_type
                                epf_result['liqhazval'] = liq_hazard_val
                                epf_result[
                                    'liqprobability'] = liquefaction_prob
                        i = i + 1

        # Unmapped epfs get zeroed limit states / damage intervals and "NA"
        # liquefaction fields so the output covers the full inventory.
        unmapped_limit_states = {
            "ls-slight": 0.0,
            "ls-moderat": 0.0,
            "ls-extensi": 0.0,
            "ls-complet": 0.0
        }
        unmapped_dmg_intervals = AnalysisUtil.calculate_damage_interval(
            unmapped_limit_states)
        for epf_id, epf in epfs.items():
            if epf_id not in processed_epf:
                unmapped_epf_result = collections.OrderedDict()
                unmapped_epf_result['guid'] = epf['properties']['guid']
                unmapped_epf_result.update(unmapped_limit_states)
                unmapped_epf_result.update(unmapped_dmg_intervals)
                unmapped_epf_result["demandtype"] = "None"
                unmapped_epf_result['demandunits'] = "None"
                unmapped_epf_result["hazardtype"] = "None"
                unmapped_epf_result['hazardval'] = 0.0
                unmapped_epf_result['liqhaztype'] = "NA"
                unmapped_epf_result['liqhazval'] = "NA"
                unmapped_epf_result['liqprobability'] = "NA"
                epf_results.append(unmapped_epf_result)

        return epf_results
# ---- Beispiel #4 (scraped-example separator; commented out so the file stays parseable) ----
    def waterfacility_damage_analysis(self, facility, fragility, liq_fragility,
                                      hazard_type, hazard_dataset_id,
                                      liq_geology_dataset_id, uncertainty):
        """Computes damage analysis for a single facility

        Args:
            facility (obj): A JSON mapping of a facility based on mapping attributes
            fragility (obj): A JSON description of fragility mapped to the building.
            liq_fragility (obj): A JSON description of liquefaction fragility mapped to the building.
            hazard_type (str): A string that indicates the hazard type
            hazard_dataset_id (str): Hazard id from the hazard service
            liq_geology_dataset_id (str): Geology dataset id from data service to use for liquefaction calculation, if
                applicable
            uncertainty (bool): Whether to use hazard standard deviation values for uncertainty

        Returns:
            OrderedDict: A dictionary with water facility damage values and other data/metadata.

        Raises:
            ValueError: If hazard_type is neither earthquake nor tsunami.
        """
        # Random standard deviation only when uncertainty is requested
        std_dev = 0
        if uncertainty:
            std_dev = random.random()

        hazard_demand_type = fragility.demand_type
        demand_units = fragility.demand_units
        # Defaults reported in the metadata when liquefaction does not apply
        liq_hazard_type = ""
        liq_hazard_val = 0.0
        liquefaction_prob = 0.0
        location = GeoUtil.get_location(facility)

        point = str(location.y) + "," + str(location.x)

        if hazard_type == "earthquake":
            hazard_val_set = self.hazardsvc.get_earthquake_hazard_values(
                hazard_dataset_id, hazard_demand_type,
                demand_units, [point])
        elif hazard_type == "tsunami":
            hazard_val_set = self.hazardsvc.get_tsunami_hazard_values(
                hazard_dataset_id, hazard_demand_type, demand_units, [point])
        else:
            raise ValueError(
                "Hazard type other than Earthquake and Tsunami are not currently supported.")
        hazard_val = hazard_val_set[0]['hazardValue']
        # Clamp negative (out-of-bounds) hazard values to zero
        if hazard_val < 0:
            hazard_val = 0

        limit_states = fragility.calculate_limit_state(hazard_val, std_dev)

        if liq_fragility is not None and liq_geology_dataset_id:
            liq_hazard_type = liq_fragility.demand_type
            pgd_demand_units = liq_fragility.demand_units

            # point was already computed above; reuse it for the PGD lookup
            liquefaction = self.hazardsvc.get_liquefaction_values(
                hazard_dataset_id, liq_geology_dataset_id,
                pgd_demand_units, [point])
            liq_hazard_val = liquefaction[0][liq_hazard_type]
            liquefaction_prob = liquefaction[0]['liqProbability']
            pgd_limit_states = liq_fragility.calculate_limit_state(
                liq_hazard_val, std_dev)

            # Combine shaking limit states with the PGD (liquefaction) states
            limit_states = AnalysisUtil.adjust_limit_states_for_pgd(
                limit_states, pgd_limit_states)

        dmg_intervals = AnalysisUtil.calculate_damage_interval(limit_states)

        metadata = collections.OrderedDict()
        metadata['guid'] = facility['properties']['guid']
        metadata['hazardtype'] = hazard_type
        metadata['demandtype'] = hazard_demand_type
        metadata['hazardval'] = hazard_val
        metadata['liqhaztype'] = liq_hazard_type
        metadata['liqhazval'] = liq_hazard_val
        metadata['liqprobability'] = liquefaction_prob

        # Metadata first, then limit states and damage intervals; merging in
        # one step replaces the previous dead OrderedDict + two-step merge
        # while keeping the same key order (needs py 3.5+).
        result = {**metadata, **limit_states, **dmg_intervals}
        return result