Example #1
0
    def calculation_unit(self, loss_type, assets):
        """
        :returns:
          a list of instances of `..base.CalculationUnit` for the given
          `assets` to be run in the celery task
        """
        # Every asset in a task shares one taxonomy, so the first asset
        # is representative for risk-model lookup.
        taxonomy = assets[0].taxonomy
        model = self.risk_models[taxonomy][loss_type]

        time_span, tses = self.hazard_times()

        # A logic-tree processor is only needed when ground motion values
        # are computed on the fly (hazard output is a stochastic event set).
        if self.rc.hazard_outputs()[0].output_type == "ses":
            ltp = logictree.LogicTreeProcessor.from_hc(self.rc)
        else:
            ltp = None

        calc = workflows.ProbabilisticEventBased(
            model.vulnerability_function,
            self.rnd.randint(0, models.MAX_SINT_32),
            self.rc.asset_correlation, time_span, tses,
            self.rc.loss_curve_resolution, self.rc.conditional_loss_poes,
            self.rc.insured_losses)
        getter = hazard_getters.GroundMotionValuesGetter(
            self.rc.hazard_outputs(), assets,
            self.rc.best_maximum_distance, model.imt,
            self.hazard_seeds, ltp)
        return workflows.CalculationUnit(loss_type, calc, getter)
Example #2
0
    def test_mean_based_with_partial_correlation(self):
        # Regression test: expected values were recorded from a run,
        # not checked independently.
        vf = scientific.VulnerabilityFunction(
            'SOME-TAXONOMY', 'PGA',
            [0.001, 0.2, 0.3, 0.5, 0.7],
            [0.01, 0.1, 0.2, 0.4, 0.8],
            [0.01, 0.02, 0.02, 0.01, 0.03])
        gmvs = numpy.array([[10., 20., 30., 40., 50.],
                            [1., 2., 3., 4., 5.]])
        epsilons = scientific.make_epsilons(gmvs, seed=1, correlation=0.5)
        loss_matrix = vf.apply_to(gmvs, epsilons)

        # Sanity-check the loss curve integral for the first asset.
        losses_poes = scientific.event_based(loss_matrix[0], 120, 30, 4)
        first_curve_integral = scientific.average_loss(losses_poes)
        self.assertAlmostEqual(0.48983614471, first_curve_integral)

        wf = workflows.ProbabilisticEventBased(
            'PGA', 'SOME-TAXONOMY',
            vulnerability_functions={self.loss_type: vf},
            investigation_time=50,
            risk_investigation_time=50,
            ses_per_logic_tree_path=200,
            number_of_logic_tree_samples=0,
            loss_curve_resolution=4,
            conditional_loss_poes=[0.1, 0.5, 0.9],
            insured_losses=False)
        out = wf(self.loss_type, assets, gmvs, epsilons, [1, 2, 3, 4, 5])
        expected = {1: 15.332714802464356,
                    2: 16.21582466071975,
                    3: 15.646630129345354,
                    4: 15.285164778325353,
                    5: 15.860930792931873}
        self.assert_similar(out.event_loss_table, expected)
Example #3
0
    def test_mean_based_with_partial_correlation(self):
        # Regression test: expected values were recorded from a run,
        # not checked independently.
        vf = scientific.VulnerabilityFunction(
            'SOME-TAXONOMY', 'PGA',
            [0.001, 0.2, 0.3, 0.5, 0.7],
            [0.01, 0.1, 0.2, 0.4, 0.8],
            [0.01, 0.02, 0.02, 0.01, 0.03])
        gmvs = numpy.array([[10., 20., 30., 40., 50.],
                           [1., 2., 3., 4., 5.]])
        epsilons = scientific.make_epsilons(gmvs, seed=1, correlation=0.5)
        loss_matrix = vf.apply_to(gmvs, epsilons)

        # Sanity-check the loss curve integral for the first asset.
        losses_poes = scientific.event_based(loss_matrix[0], .25, 4)
        first_curve_integral = scientific.average_loss(losses_poes)
        self.assertAlmostEqual(0.48983614471, first_curve_integral)

        wf = workflows.ProbabilisticEventBased(
            'PGA', 'SOME-TAXONOMY',
            vulnerability_functions={self.loss_type: vf},
            investigation_time=50,
            risk_investigation_time=50,
            ses_per_logic_tree_path=200,
            number_of_logic_tree_samples=0,
            loss_curve_resolution=4,
            conditional_loss_poes=[0.1, 0.5, 0.9],
            insured_losses=False)
        # The risk model is not under test here; stub it out.
        wf.riskmodel = mock.MagicMock()
        out = wf(self.loss_type, assets, gmvs, epsilons, [1, 2, 3, 4, 5])
        numpy.testing.assert_almost_equal(
            out.average_losses, [0.01987912, 0.01929152])
Example #4
0
    def test_insured_loss_mean_based(self):
        vf = scientific.VulnerabilityFunction(
            'VF', 'PGA',
            [0.001, 0.2, 0.3, 0.5, 0.7],
            [0.01, 0.1, 0.2, 0.4, 0.8],
            [0.0, 0.0, 0.0, 0.0, 0.0])

        epsilons = scientific.make_epsilons(gmf[0:2], seed=1, correlation=0)
        loss_ratios = vf.apply_to(gmf[0:2], epsilons)

        values = [3000., 1000.]
        insured_limits = [1250., 40.]
        deductibles = [40., 13.]

        # Per-asset average insured loss via the scalar pipeline:
        # clip to deductible/limit, build the loss curve, integrate it.
        insured_average_losses = []
        for i, lrs in enumerate(loss_ratios):
            clipped = scientific.insured_losses(
                lrs, deductibles[i] / values[i],
                insured_limits[i] / values[i])
            curve = scientific.event_based(clipped, 50, 50, 20)
            insured_average_losses.append(scientific.average_loss(curve))
        numpy.testing.assert_allclose([0.05667045, 0.02542965],
                                      insured_average_losses)

        wf = workflows.ProbabilisticEventBased(
            'PGA', 'SOME-TAXONOMY',
            vulnerability_functions={self.loss_type: vf},
            investigation_time=50,
            risk_investigation_time=50,
            ses_per_logic_tree_path=200,
            number_of_logic_tree_samples=0,
            loss_curve_resolution=4,
            conditional_loss_poes=[0.1, 0.5, 0.9],
            insured_losses=True)
        out = wf(self.loss_type, assets, gmf[0:2], epsilons, [1, 2, 3, 4, 5])
        expected = {1: 0.20314761658291458, 2: 0, 3: 0, 4: 0, 5: 0}
        self.assert_similar(out.event_loss_table, expected)
Example #5
0
    def test_mean_based_with_no_correlation(self):
        # Regression test: expected values were recorded from a run,
        # not checked independently.
        vf = scientific.VulnerabilityFunction(
            'SOME-TAXONOMY', 'PGA',
            [0.001, 0.2, 0.3, 0.5, 0.7],
            [0.01, 0.1, 0.2, 0.4, 0.8],
            [0.01, 0.02, 0.02, 0.01, 0.03])
        gmvs = numpy.array([[10., 20., 30., 40., 50.],
                            [1., 2., 3., 4., 5.]])

        epsilons = scientific.make_epsilons(gmvs, seed=1, correlation=0)
        loss_matrix = vf.apply_to(gmvs, epsilons)

        # Sanity-check the loss curve integral for the first asset.
        losses_poes = scientific.event_based(
            loss_matrix[0], 120, 30, curve_resolution=4)
        first_curve_integral = scientific.average_loss(losses_poes)
        self.assertAlmostEqual(0.500993631, first_curve_integral)

        wf = workflows.ProbabilisticEventBased(
            'PGA', 'SOME-TAXONOMY',
            vulnerability_functions={self.loss_type: vf},
            investigation_time=50,
            risk_investigation_time=50,
            ses_per_logic_tree_path=200,
            number_of_logic_tree_samples=0,
            loss_curve_resolution=4,
            conditional_loss_poes=[0.1, 0.5, 0.9],
            insured_losses=False)
        out = wf(self.loss_type, assets, gmvs, epsilons, [1, 2, 3, 4, 5])
        expected = {1: 16.246646231503398,
                    2: 15.613885199116158,
                    3: 15.669704465134854,
                    4: 16.241922530992454,
                    5: 16.010104452203464}
        self.assert_similar(out.event_loss_table, expected)
Example #6
0
    def test_mean_based_with_perfect_correlation(self):
        # Regression test: expected values were recorded from a run,
        # not checked independently.
        vf = scientific.VulnerabilityFunction(
            'SOME-TAXONOMY', 'PGA',
            [0.001, 0.2, 0.3, 0.5, 0.7],
            [0.01, 0.1, 0.2, 0.4, 0.8],
            [0.01, 0.02, 0.02, 0.01, 0.03])

        # NOTE: plain lists here, unlike the sibling tests that use arrays.
        gmvs = [[10., 20., 30., 40., 50.], [1., 2., 3., 4., 5.]]

        epsilons = scientific.make_epsilons(gmvs, seed=1, correlation=1)
        loss_matrix = vf.apply_to(gmvs, epsilons)

        # Sanity-check the loss curve integral for the first asset.
        losses_poes = scientific.event_based(loss_matrix[0], 120, 30, 4)
        first_curve_integral = scientific.average_loss(losses_poes)
        self.assertAlmostEqual(0.483041416, first_curve_integral)

        wf = workflows.ProbabilisticEventBased(
            'PGA', 'SOME-TAXONOMY',
            vulnerability_functions={self.loss_type: vf},
            investigation_time=50,
            risk_investigation_time=50,
            ses_per_logic_tree_path=200,
            number_of_logic_tree_samples=0,
            loss_curve_resolution=4,
            conditional_loss_poes=[0.1, 0.5, 0.9],
            insured_losses=False)
        out = wf(self.loss_type, assets, gmvs, epsilons, [1, 2, 3, 4, 5])
        expected = {1: 15.232320555463319,
                    2: 16.248173683693864,
                    3: 15.583030510462981,
                    4: 15.177382760499968,
                    5: 15.840499250058254}
        self.assert_similar(out.event_loss_table, expected)