Example #1
    def test_constant(self):
        expected = [10] * 100
        actual = scientific.event_based(expected, 50, 50, 11)

        numpy.testing.assert_allclose([10] * 11, actual.abscissae)
        numpy.testing.assert_allclose(
            numpy.arange(0, 1.1, 0.1), actual.ordinates)
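These tests pin down `scientific.event_based`, which turns a list of event losses observed over a stochastic event set of duration `tses` into a loss exceedance curve of `curve_resolution` points for the given `time_span`. A minimal sketch of the general technique (an empirical exceedance rate converted to a probability under a Poisson assumption); the engine's exact loss-level placement and interpolation differ, as the constant-losses test above shows:

import numpy

def event_based_sketch(event_losses, tses, time_span, curve_resolution):
    # empirical rate at which each reference loss level is exceeded,
    # over a stochastic event set of total duration `tses`
    event_losses = numpy.asarray(event_losses, dtype=float)
    levels = numpy.linspace(event_losses.min(), event_losses.max(),
                            curve_resolution)
    rates = numpy.array(
        [(event_losses > level).sum() for level in levels]) / tses
    # Poissonian probability of at least one exceedance in `time_span`
    poes = 1.0 - numpy.exp(-rates * time_span)
    return levels, poes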
Example #2
    def post_process(self):
        # compute aggregate loss curves
        for hazard_output in self.considered_hazard_outputs():
            loss_curve = models.LossCurve.objects.get(
                hazard_output=hazard_output,
                aggregate=True, output__oq_job=self.job)
            curve_data = loss_curve.aggregatelosscurvedata

            tses, time_span = self.hazard_times()

            aggregate_loss_curve = scientific.event_based(
                curve_data.losses, tses, time_span,
                curve_resolution=self.rc.loss_curve_resolution)

            curve_data.losses = aggregate_loss_curve.abscissae.tolist()
            curve_data.poes = aggregate_loss_curve.ordinates.tolist()
            curve_data.save()

        event_loss_table_output = models.Output.objects.create_output(
            self.job, "Event Loss Table", "event_loss")

        for rupture_id, aggregate_loss in self.event_loss_table.items():
            models.EventLoss.objects.create(
                output=event_loss_table_output,
                rupture_id=rupture_id,
                aggregate_loss=aggregate_loss)
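Example #2 unpacks `tses, time_span = self.hazard_times()`, while later versions (Examples #29 and #30 below) unpack the pair in the opposite order, so the order is version-dependent. A hypothetical standalone equivalent, based on Example #28's `tses = oq.investigation_time * oq.ses_per_logic_tree_path`; the real method reads these values from the calculation's hazard parameters:

def hazard_times(investigation_time, ses_per_logic_tree_path):
    """Return (tses, time_span): the total duration of the stochastic
    event set and the investigation time span it represents."""
    return investigation_time * ses_per_logic_tree_path, investigation_time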
Example #3
    def test_mean_based_with_partial_correlation(self):
        # This is a regression test. Data has not been checked
        vf = (
            scientific.VulnerabilityFunction(
                'PGA',
                [0.001, 0.2, 0.3, 0.5, 0.7],
                [0.01, 0.1, 0.2, 0.4, 0.8],
                [0.01, 0.02, 0.02, 0.01, 0.03]))
        gmvs = numpy.array([[10., 20., 30., 40., 50.],
                           [1., 2., 3., 4., 5.]])
        epsilons = scientific.make_epsilons(gmvs, seed=1, correlation=0.5)
        loss_matrix = vf.apply_to(gmvs, epsilons)

        losses, poes = scientific.event_based(loss_matrix[0], 120, 30, 4)
        first_curve_integral = scientific.average_loss(losses, poes)

        self.assertAlmostEqual(0.48983614471, first_curve_integral)

        wf = workflows.ProbabilisticEventBased(
            vulnerability_functions={self.loss_type: vf},
            time_span=50,
            tses=10000,
            loss_curve_resolution=4,
            conditional_loss_poes=[0.1, 0.5, 0.9],
            insured_losses=False
            )
        out = wf(self.loss_type, assets, gmvs, epsilons, [1, 2, 3, 4, 5])
        self.assert_similar(
            out.event_loss_table,
            {1: 15.332714802464356,
             2: 16.21582466071975,
             3: 15.646630129345354,
             4: 15.285164778325353,
             5: 15.860930792931873,
             })
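The tests treat `scientific.average_loss` as the integral of the loss curve (the variable is even called `first_curve_integral`). A sketch consistent with that reading, using trapezoidal integration; the engine's implementation may differ numerically:

import numpy

def average_loss_sketch(losses, poes):
    # losses ascend while poes descend; numpy.trapz then integrates in
    # the direction of decreasing x, so flip the sign
    return -numpy.trapz(losses, poes)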
Example #4
    def test_mean_based_with_partial_correlation(self):
        # This is a regression test. Data has not been checked
        vf = (scientific.VulnerabilityFunction('SOME-TAXONOMY', 'PGA',
                                               [0.001, 0.2, 0.3, 0.5, 0.7],
                                               [0.01, 0.1, 0.2, 0.4, 0.8],
                                               [0.01, 0.02, 0.02, 0.01, 0.03]))
        gmvs = numpy.array([[10., 20., 30., 40., 50.], [1., 2., 3., 4., 5.]])
        epsilons = scientific.make_epsilons(gmvs, seed=1, correlation=0.5)
        loss_matrix = vf.apply_to(gmvs, epsilons)

        losses_poes = scientific.event_based(loss_matrix[0], 120, 30, 4)
        first_curve_integral = scientific.average_loss(losses_poes)

        self.assertAlmostEqual(0.48983614471, first_curve_integral)

        wf = workflows.ProbabilisticEventBased(
            'PGA',
            'SOME-TAXONOMY',
            vulnerability_functions={self.loss_type: vf},
            investigation_time=50,
            risk_investigation_time=50,
            ses_per_logic_tree_path=200,
            number_of_logic_tree_samples=0,
            loss_curve_resolution=4,
            conditional_loss_poes=[0.1, 0.5, 0.9],
            insured_losses=False)
        out = wf(self.loss_type, assets, gmvs, epsilons, [1, 2, 3, 4, 5])
        self.assert_similar(
            out.event_loss_table, {
                1: 15.332714802464356,
                2: 16.21582466071975,
                3: 15.646630129345354,
                4: 15.285164778325353,
                5: 15.860930792931873,
            })
Example #5
    def test_insured_loss_mean_based(self):
        vf = scientific.VulnerabilityFunction(
            [0.001, 0.2, 0.3, 0.5, 0.7],
            [0.01, 0.1, 0.2, 0.4, 0.8],
            [0.0, 0.0, 0.0, 0.0, 0.0],
            "LN")

        loss_ratios = scientific.vulnerability_function_applier(
            vf, gmf[0:2])

        values = [3000, 1000]
        insured_limits = [1250., 40.]
        deductibles = [40, 13]

        insured_average_losses = [
            scientific.average_loss(*scientific.event_based(
                scientific.insured_losses(
                    lrs,
                    deductibles[i] / values[i], insured_limits[i] / values[i]),
                50, 50, 20))
            for i, lrs in enumerate(loss_ratios)]

        numpy.testing.assert_allclose(
            [207.86489132 / 3000,   38.07815797 / 1000],
            insured_average_losses)
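Note that the deductible and insured limit are passed to `scientific.insured_losses` as ratios of the asset value. A minimal sketch of that kind of deductible/limit treatment (one common formulation; the engine's exact piecewise rule may differ at the boundaries):

import numpy

def insured_losses_sketch(loss_ratios, deductible, insured_limit):
    # nothing is reimbursed below the deductible and the payout is
    # capped at insured_limit - deductible
    loss_ratios = numpy.asarray(loss_ratios, dtype=float)
    return numpy.clip(loss_ratios, deductible, insured_limit) - deductible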
Example #6
    def test_mean_based_with_partial_correlation(self):
        # This is a regression test. Data has not been checked
        vf = (
            scientific.VulnerabilityFunction(
                'SOME-TAXONOMY', 'PGA',
                [0.001, 0.2, 0.3, 0.5, 0.7],
                [0.01, 0.1, 0.2, 0.4, 0.8],
                [0.01, 0.02, 0.02, 0.01, 0.03]))
        gmvs = numpy.array([[10., 20., 30., 40., 50.],
                           [1., 2., 3., 4., 5.]])
        epsilons = scientific.make_epsilons(gmvs, seed=1, correlation=0.5)
        loss_matrix = vf.apply_to(gmvs, epsilons)

        losses_poes = scientific.event_based(loss_matrix[0], .25, 4)
        first_curve_integral = scientific.average_loss(losses_poes)

        self.assertAlmostEqual(0.48983614471, first_curve_integral)

        wf = workflows.ProbabilisticEventBased(
            'PGA', 'SOME-TAXONOMY',
            vulnerability_functions={self.loss_type: vf},
            investigation_time=50,
            risk_investigation_time=50,
            ses_per_logic_tree_path=200,
            number_of_logic_tree_samples=0,
            loss_curve_resolution=4,
            conditional_loss_poes=[0.1, 0.5, 0.9],
            insured_losses=False
            )
        wf.riskmodel = mock.MagicMock()
        out = wf(self.loss_type, assets, gmvs, epsilons, [1, 2, 3, 4, 5])
        numpy.testing.assert_almost_equal(
            out.average_losses, [0.01987912, 0.01929152])
Example #7
def build_agg_curve(lr_list, insured_losses, ses_ratio, curve_resolution,
                    monitor):
    """
    Build the aggregate loss curve in parallel for each loss type
    and realization pair.

    :param lr_list:
        a list of triples `(l, r, data)` where `l` is a loss type string,
        `r` is the realization index and `data` is an array of pairs
        `(rupture_id, loss)` where `loss` is an array with two values
    :param insured_losses:
        job.ini configuration parameter
    :param ses_ratio:
        the ratio between the investigation time span and the total
        duration of the stochastic event set (derived from
        ses_per_logic_tree_path)
    :param curve_resolution:
        the number of discretization steps for the loss curve
    :param monitor:
        a Monitor instance
    :returns:
        a dictionary (l, r, i) -> (losses, poes, avg)
    """
    result = {}
    for l, r, data in lr_list:
        if len(data) == 0:  # realization with no losses
            continue
        for i in range(insured_losses + 1):  # i == 0: ground, i == 1: insured
            the_losses = numpy.array(
                [loss[i] for _rupid, loss in data], F32)
            losses, poes = scientific.event_based(
                the_losses, ses_ratio, curve_resolution)
            avg = scientific.average_loss((losses, poes))
            result[l, r, i] = (losses, poes, avg)
    return result
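A hypothetical invocation of the function above; the `(rupture_id, loss)` pairs, the `F32` alias and the call arguments are assumptions made for the sake of the example:

import numpy

F32 = numpy.float32  # alias assumed by the function above
# one realization of loss type 'structural' with two ruptures; each
# loss holds [ground loss, insured loss]
lr_list = [('structural', 0,
            [(101, numpy.array([12.5, 7.0])),
             (102, numpy.array([3.0, 0.0]))])]
# with the OpenQuake scientific module importable, one would call:
# result = build_agg_curve(lr_list, insured_losses=1, ses_ratio=0.25,
#                          curve_resolution=20, monitor=None)
# losses, poes, avg = result['structural', 0, 0]  # ground losses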
Example #8
    def build_loss_curves(self, elass, loss_type, i):
        """
        Build loss curves per asset from a set of losses; each curve has
        a number of points given by the parameter loss_curve_resolution.

        :param elass: a dict (loss_type, asset_id) -> (tag, loss, ins_loss)
        :param loss_type: the loss_type
        :param i: 1 for loss curves or 2 for insured losses
        :returns: an array of loss curves, one for each asset
        """
        oq = self.oqparam
        C = oq.loss_curve_resolution
        lcs = []
        for asset in self.assets:
            all_losses = [loss[i] for loss in elass[loss_type, asset.id]]
            if all_losses:
                losses, poes = scientific.event_based(
                    all_losses, tses=oq.tses,
                    time_span=oq.risk_investigation_time or
                    oq.investigation_time, curve_resolution=C)
                avg = scientific.average_loss((losses, poes))
            else:
                losses, poes = numpy.zeros(C), numpy.zeros(C)
                avg = 0
            lcs.append((losses, poes, avg))
        return numpy.array(lcs, self.loss_curve_dt)
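The curves are collected into a structured array via `self.loss_curve_dt`. A sketch of a composite dtype with this shape (a hypothetical layout matching the `(losses, poes, avg)` triples appended above):

import numpy

C = 20  # loss_curve_resolution
loss_curve_dt = numpy.dtype([('losses', (numpy.float32, C)),
                             ('poes', (numpy.float32, C)),
                             ('avg', numpy.float32)])
curves = numpy.zeros(2, loss_curve_dt)  # one record per asset
curves[0] = (numpy.ones(C), numpy.linspace(1.0, 0.0, C), 0.5)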
Example #9
def build_agg_curve(lr_data, insured_losses, ses_ratio, curve_resolution, L,
                    monitor):
    """
    Build the aggregate loss curve in parallel for each loss type
    and realization pair.

    :param lr_data:
        a list of triples `(l, r, data)` where `l` is the loss type index,
        `r` is the realization index and `data` is an array of kind
        `(rupture_id, loss)` or `(rupture_id, loss, loss_ins)`
    :param bool insured_losses:
        job.ini configuration parameter
    :param ses_ratio:
        the ratio between the investigation time span and the total
        duration of the stochastic event set (derived from
        ses_per_logic_tree_path)
    :param curve_resolution:
        the number of discretization steps for the loss curve
    :param L:
        the number of loss types
    :param monitor:
        a Monitor instance
    :returns:
        a dictionary (l, r, name) -> array, where `name` is one of
        'losses', 'poes', 'avg', plus the '_ins' variants when
        insured_losses is set
    """
    result = {}
    for l, r, data in lr_data:
        if len(data) == 0:  # realization with no losses
            continue
        if insured_losses:
            gloss = data['loss'][:, 0]
            iloss = data['loss'][:, 1]
        else:
            gloss = data['loss']
        losses, poes = scientific.event_based(
            gloss, ses_ratio, curve_resolution)
        avg = scientific.average_loss((losses, poes))
        result[l, r, 'losses'] = losses
        result[l, r, 'poes'] = poes
        result[l, r, 'avg'] = avg
        if insured_losses:
            losses_ins, poes_ins = scientific.event_based(
                iloss, ses_ratio, curve_resolution)
            avg_ins = scientific.average_loss((losses_ins, poes_ins))
            result[l, r, 'losses_ins'] = losses_ins
            result[l, r, 'poes_ins'] = poes_ins
            result[l, r, 'avg_ins'] = avg_ins
    return result
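When `insured_losses` is set, `data['loss']` is sliced by column, implying a two-column 'loss' field. A sketch of a compatible record array; the field name `rupture_id` and the sample values are assumptions:

import numpy

elt_dt = numpy.dtype([('rupture_id', numpy.uint32),
                      ('loss', (numpy.float64, 2))])
data = numpy.array([(101, (12.5, 7.0)), (102, (3.0, 0.0))], elt_dt)
gloss = data['loss'][:, 0]  # ground losses
iloss = data['loss'][:, 1]  # insured losses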
Example #11
    def test_mean_based(self):
        epsilons = scientific.make_epsilons([gmf[0]], seed=1, correlation=0)
        vulnerability_function_rm = (
            scientific.VulnerabilityFunction(
                'RM', 'PGA',
                [0.001, 0.2, 0.3, 0.5, 0.7],
                [0.01, 0.1, 0.2, 0.4, 0.8],
                [0.0, 0.0, 0.0, 0.0, 0.0]))

        vulnerability_function_rc = (
            scientific.VulnerabilityFunction(
                'RC', 'PGA',
                [0.001, 0.2, 0.3, 0.5, 0.7],
                [0.0035, 0.07, 0.14, 0.28, 0.56],
                [0.0, 0.0, 0.0, 0.0, 0.0]))

        cr = 50  # curve resolution
        curve_rm_1 = scientific.event_based(
            vulnerability_function_rm.apply_to(
                [gmf[0]], epsilons)[0], 50, 50, cr)

        curve_rm_2 = scientific.event_based(
            vulnerability_function_rm.apply_to(
                [gmf[1]], epsilons)[0], 50, 50, cr)

        curve_rc = scientific.event_based(
            vulnerability_function_rc.apply_to(
                [gmf[2]], epsilons)[0], 50, 50, cr)

        for i, curve_rm in enumerate([curve_rm_1, curve_rm_2]):

            conditional_loss = scientific.conditional_loss_ratio(
                curve_rm[0], curve_rm[1], 0.8)
            self.assertAlmostEqual([0.0490311, 0.0428061][i], conditional_loss)

            self.assertAlmostEqual(
                [0.070219108, 0.04549904][i],
                scientific.average_loss(curve_rm))

        conditional_loss = scientific.conditional_loss_ratio(
            curve_rc[0], curve_rc[1], 0.8)
        self.assertAlmostEqual(0.0152273, conditional_loss)

        self.assertAlmostEqual(
            0.0152393, scientific.average_loss(curve_rc))
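`scientific.conditional_loss_ratio(losses, poes, poe)` reads the loss level exceeded with the given probability off the curve. A sketch via linear interpolation, assuming losses ascend and poes decrease along the curve; the engine handles flat segments more carefully:

import numpy

def conditional_loss_ratio_sketch(losses, poes, probability):
    # numpy.interp needs ascending x values, so walk the curve backwards
    losses, poes = numpy.asarray(losses), numpy.asarray(poes)
    return float(numpy.interp(probability, poes[::-1], losses[::-1]))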
Example #12
    def test_mean_based(self):
        epsilons = scientific.make_epsilons([gmf[0]], seed=1, correlation=0)
        vulnerability_function_rm = (
            scientific.VulnerabilityFunction(
                'RM', 'PGA',
                [0.001, 0.2, 0.3, 0.5, 0.7],
                [0.01, 0.1, 0.2, 0.4, 0.8],
                [0.0, 0.0, 0.0, 0.0, 0.0]))

        vulnerability_function_rc = (
            scientific.VulnerabilityFunction(
                'RC', 'PGA',
                [0.001, 0.2, 0.3, 0.5, 0.7],
                [0.0035, 0.07, 0.14, 0.28, 0.56],
                [0.0, 0.0, 0.0, 0.0, 0.0]))

        cr = 50  # curve resolution
        curve_rm_1 = scientific.event_based(
            vulnerability_function_rm.apply_to(
                [gmf[0]], epsilons)[0], 1, cr)

        curve_rm_2 = scientific.event_based(
            vulnerability_function_rm.apply_to(
                [gmf[1]], epsilons)[0], 1, cr)

        curve_rc = scientific.event_based(
            vulnerability_function_rc.apply_to(
                [gmf[2]], epsilons)[0], 1, cr)

        for i, curve_rm in enumerate([curve_rm_1, curve_rm_2]):

            conditional_loss = scientific.conditional_loss_ratio(
                curve_rm[0], curve_rm[1], 0.8)
            self.assertAlmostEqual([0.0490311, 0.0428061][i], conditional_loss)

            self.assertAlmostEqual(
                [0.070219108, 0.04549904][i],
                scientific.average_loss(curve_rm))

        conditional_loss = scientific.conditional_loss_ratio(
            curve_rc[0], curve_rc[1], 0.8)
        self.assertAlmostEqual(0.0152273, conditional_loss)

        self.assertAlmostEqual(
            0.0152393, scientific.average_loss(curve_rc))
Example #13
    def test_mean_based(self):
        vulnerability_function_rm = (
            scientific.VulnerabilityFunction(
                [0.001, 0.2, 0.3, 0.5, 0.7], [0.01, 0.1, 0.2, 0.4, 0.8],
                [0.0, 0.0, 0.0, 0.0, 0.0], "LN"))

        vulnerability_function_rc = (
            scientific.VulnerabilityFunction(
                [0.001, 0.2, 0.3, 0.5, 0.7], [0.0035, 0.07, 0.14, 0.28, 0.56],
                [0.0, 0.0, 0.0, 0.0, 0.0], "LN"))

        curve_rm_1 = scientific.event_based(
            scientific.vulnerability_function_applier(
                vulnerability_function_rm, [gmf[0]])[0], 50, 50)

        curve_rm_2 = scientific.event_based(
            scientific.vulnerability_function_applier(
                vulnerability_function_rm, [gmf[1]])[0], 50, 50)

        curve_rc = scientific.event_based(
            scientific.vulnerability_function_applier(
                vulnerability_function_rc, [gmf[2]])[0], 50, 50)

        for i, curve_rm in enumerate([curve_rm_1, curve_rm_2]):

            conditional_loss = scientific.conditional_loss_ratio(
                curve_rm[0], curve_rm[1], 0.8)
            self.assertAlmostEqual([0.0490311, 0.0428061][i], conditional_loss)

            self.assertAlmostEqual(
                [0.070219108, 0.04549904][i],
                scientific.average_loss(curve_rm[0], curve_rm[1]))

        conditional_loss = scientific.conditional_loss_ratio(
            curve_rc[0], curve_rc[1], 0.8)
        self.assertAlmostEqual(0.0152273, conditional_loss)

        self.assertAlmostEqual(
            0.0152393,
            scientific.average_loss(curve_rc[0], curve_rc[1]))
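Several tests above draw `epsilons` with `scientific.make_epsilons(gmvs, seed, correlation)`. A sketch of sampling standard normals with a constant pairwise correlation across assets (illustrative only; the engine's sampling details may differ):

import numpy

def make_epsilons_sketch(num_assets, num_events, seed, correlation):
    rng = numpy.random.default_rng(seed)
    cov = numpy.full((num_assets, num_assets), float(correlation))
    numpy.fill_diagonal(cov, 1.0)
    chol = numpy.linalg.cholesky(cov)  # requires 0 <= correlation < 1
    return chol @ rng.standard_normal((num_assets, num_events))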
Example #14
def insured_losses(loss_type, unit, assets, loss_ratio_matrix):
    for asset, losses in zip(assets, loss_ratio_matrix):
        asset_insured_losses, poes = scientific.event_based(
            scientific.insured_losses(
                losses,
                asset.value(loss_type),
                asset.deductible(loss_type),
                asset.insurance_limit(loss_type)),
            tses=unit.calc.tses,
            time_span=unit.calc.time_span)
        # FIXME(lp). Insured losses are still computed as absolute
        # values.
        yield asset_insured_losses / asset.value(loss_type), poes
Example #15
    def test_mean_based_with_partial_correlation(self):
        # This is a regression test. Data has not been checked
        vf = (
            scientific.VulnerabilityFunction(
                [0.001, 0.2, 0.3, 0.5, 0.7], [0.01, 0.1, 0.2, 0.4, 0.8],
                [0.01, 0.02, 0.02, 0.01, 0.03], "LN"))
        gmvs = numpy.array([[10., 20., 30., 40., 50.],
                           [1., 2., 3., 4., 5.]])
        loss_matrix = scientific.vulnerability_function_applier(
            vf, gmvs, seed=1, asset_correlation=0.5)

        losses, poes = scientific.event_based(loss_matrix[0], 120, 30, 4)
        first_curve_integral = scientific.average_loss(losses, poes)

        self.assertAlmostEqual(0.48983614471, first_curve_integral)
Example #16
    def post_process(self):
        # compute aggregate loss curves
        for hazard_output in self.considered_hazard_outputs():
            loss_curve = models.LossCurve.objects.get(
                hazard_output=hazard_output,
                aggregate=True, output__oq_job=self.job)
            curve_data = loss_curve.aggregatelosscurvedata

            tses, time_span = self.hazard_times()

            aggregate_loss_curve = scientific.event_based(
                curve_data.losses, tses, time_span,
                curve_resolution=self.rc.loss_curve_resolution)

            curve_data.losses = aggregate_loss_curve.abscissae.tolist()
            curve_data.poes = aggregate_loss_curve.ordinates.tolist()
            curve_data.save()
Example #17
    def test_insured_loss_mean_based(self):
        vf = scientific.VulnerabilityFunction(
            'VF', 'PGA',
            [0.001, 0.2, 0.3, 0.5, 0.7],
            [0.01, 0.1, 0.2, 0.4, 0.8],
            [0.0, 0.0, 0.0, 0.0, 0.0])

        epsilons = scientific.make_epsilons(gmf[0:2], seed=1, correlation=0)
        loss_ratios = vf.apply_to(gmf[0:2], epsilons)

        values = [3000, 1000]
        insured_limits = [1250., 40.]
        deductibles = [40, 13]

        insured_average_losses = [
            scientific.average_loss(scientific.event_based(
                scientific.insured_losses(
                    lrs,
                    deductibles[i] / values[i], insured_limits[i] / values[i]),
                50, 50, 20))
            for i, lrs in enumerate(loss_ratios)]

        numpy.testing.assert_allclose(
            [207.86489132 / 3000,   38.07815797 / 1000],
            insured_average_losses)

        wf = workflows.ProbabilisticEventBased(
            'PGA', 'SOME-TAXONOMY',
            vulnerability_functions={self.loss_type: vf},
            risk_investigation_time=50,
            hazard_investigation_time=50,
            ses_per_logic_tree_path=200,
            number_of_logic_tree_samples=0,
            loss_curve_resolution=4,
            conditional_loss_poes=[0.1, 0.5, 0.9],
            insured_losses=True
            )
        out = wf(self.loss_type, assets, gmf[0:2], epsilons, [1, 2, 3, 4, 5])
        self.assert_similar(
            out.event_loss_table,
            {1: 0.20314761658291458,
             2: 0,
             3: 0,
             4: 0,
             5: 0,
             })
Example #18
    def build_agg_loss_curve_and_map(self, losses):
        """
        Build a loss curve, with a number of points given by the
        parameter loss_curve_resolution, from a set of losses.

        :param losses: a sequence of losses
        :returns: a quartet (losses, poes, avg, loss_map)
        """
        oq = self.oqparam
        clp = oq.conditional_loss_poes
        losses_poes = scientific.event_based(
            losses, tses=oq.tses, time_span=oq.risk_investigation_time or
            oq.investigation_time, curve_resolution=oq.loss_curve_resolution)
        loss_map = scientific.loss_map_matrix(
            clp, [losses_poes]).reshape(len(clp)) if clp else None
        return (losses_poes[0], losses_poes[1],
                scientific.average_loss(losses_poes), loss_map)
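`scientific.loss_map_matrix(clp, curves)` evaluates each curve at the conditional loss poes. A sketch for a single curve, reusing the interpolation idea from `conditional_loss_ratio` above, with purely illustrative values:

import numpy

clp = [0.1, 0.5, 0.9]
losses = numpy.array([0.0, 1.0, 2.0, 3.0])
poes = numpy.array([1.0, 0.6, 0.3, 0.1])
loss_map = numpy.array(
    [numpy.interp(p, poes[::-1], losses[::-1]) for p in clp])
# -> array([3.        , 1.33333333, 0.25      ])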
Example #19
    def __call__(self, ground_motion_fields):
        if not len(ground_motion_fields):
            return numpy.array([[]]), []

        self.vulnerability_function.init_distribution(
            len(ground_motion_fields), len(ground_motion_fields[0]),
            self.seed, self.correlation)

        loss_ratios = [
            self.vulnerability_function(ground_motion_field)
            for ground_motion_field in ground_motion_fields]

        return (loss_ratios,
                [scientific.event_based(
                    asset_loss_ratios,
                    tses=self.tses, time_span=self.time_span,
                    curve_resolution=self.curve_resolution)
                    for asset_loss_ratios in loss_ratios])
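Example #19 applies the vulnerability function to each ground motion field to obtain loss ratios; elsewhere the same step is `vf.apply_to(gmvs, epsilons)`. A sketch of the underlying idea, assuming a lognormal distribution of loss ratios around the interpolated mean (strictly illustrative; it ignores the engine's handling of zero means and covs):

import numpy

def apply_vf_sketch(imls, mean_ratios, covs, gmvs, epsilons):
    # interpolate the mean loss ratio and its coefficient of variation
    # at each ground-motion value (assumes means > 0)
    means = numpy.interp(gmvs, imls, mean_ratios)
    stddevs = means * numpy.interp(gmvs, imls, covs)
    # moment-matched lognormal: E[X] = means, Std[X] = stddevs
    sigma = numpy.sqrt(numpy.log1p((stddevs / means) ** 2))
    mu = numpy.log(means) - sigma ** 2 / 2.0
    return numpy.exp(mu + sigma * epsilons)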
Example #20
    def test_insured_loss_mean_based(self):
        vf = scientific.VulnerabilityFunction('VF', 'PGA',
                                              [0.001, 0.2, 0.3, 0.5, 0.7],
                                              [0.01, 0.1, 0.2, 0.4, 0.8],
                                              [0.0, 0.0, 0.0, 0.0, 0.0])

        epsilons = scientific.make_epsilons(gmf[0:2], seed=1, correlation=0)
        loss_ratios = vf.apply_to(gmf[0:2], epsilons)

        values = [3000., 1000.]
        insured_limits = [1250., 40.]
        deductibles = [40., 13.]

        insured_average_losses = [
            scientific.average_loss(
                scientific.event_based(
                    scientific.insured_losses(lrs, deductibles[i] / values[i],
                                              insured_limits[i] / values[i]),
                    50, 50, 20)) for i, lrs in enumerate(loss_ratios)
        ]
        numpy.testing.assert_allclose([0.05667045, 0.02542965],
                                      insured_average_losses)

        wf = workflows.ProbabilisticEventBased(
            'PGA',
            'SOME-TAXONOMY',
            vulnerability_functions={self.loss_type: vf},
            investigation_time=50,
            risk_investigation_time=50,
            ses_per_logic_tree_path=200,
            number_of_logic_tree_samples=0,
            loss_curve_resolution=4,
            conditional_loss_poes=[0.1, 0.5, 0.9],
            insured_losses=True)
        out = wf(self.loss_type, assets, gmf[0:2], epsilons, [1, 2, 3, 4, 5])
        self.assert_similar(out.event_loss_table, {
            1: 0.20314761658291458,
            2: 0,
            3: 0,
            4: 0,
            5: 0,
        })
Example #21
    def test_insured_loss_mean_based(self):
        vf = scientific.VulnerabilityFunction(
            'VF', 'PGA',
            [0.001, 0.2, 0.3, 0.5, 0.7],
            [0.01, 0.1, 0.2, 0.4, 0.8],
            [0.0, 0.0, 0.0, 0.0, 0.0])

        epsilons = scientific.make_epsilons(gmf[0:2], seed=1, correlation=0)
        loss_ratios = vf.apply_to(gmf[0:2], epsilons)

        values = [3000., 1000.]
        insured_limits = [1250., 40.]
        deductibles = [40., 13.]

        insured_average_losses = [
            scientific.average_loss(scientific.event_based(
                scientific.insured_losses(
                    lrs,
                    deductibles[i] / values[i], insured_limits[i] / values[i]),
                1, 20))
            for i, lrs in enumerate(loss_ratios)]
        numpy.testing.assert_allclose([0.05667045, 0.02542965],
                                      insured_average_losses)

        wf = workflows.ProbabilisticEventBased(
            'PGA', 'SOME-TAXONOMY',
            vulnerability_functions={self.loss_type: vf},
            investigation_time=50,
            risk_investigation_time=50,
            ses_per_logic_tree_path=200,
            number_of_logic_tree_samples=0,
            loss_curve_resolution=4,
            conditional_loss_poes=[0.1, 0.5, 0.9],
            insured_losses=True
            )
        wf.riskmodel = mock.MagicMock()
        out = wf(self.loss_type, assets, gmf[0:2], epsilons, [1, 2, 3, 4, 5])
        numpy.testing.assert_almost_equal(
            out.average_losses, [0.00473820568, 0.0047437959417])
        numpy.testing.assert_almost_equal(
            out.average_insured_losses, [0, 0])
Example #22
    def test_mean_based_with_no_correlation(self):
        # This is a regression test. Data has not been checked
        vf = (
            scientific.VulnerabilityFunction(
                'SOME-TAXONOMY', 'PGA',
                [0.001, 0.2, 0.3, 0.5, 0.7],
                [0.01, 0.1, 0.2, 0.4, 0.8],
                [0.01, 0.02, 0.02, 0.01, 0.03]))
        gmvs = numpy.array([[10., 20., 30., 40., 50.],
                            [1., 2., 3., 4., 5.]])

        epsilons = scientific.make_epsilons(gmvs, seed=1, correlation=0)
        loss_matrix = vf.apply_to(gmvs, epsilons)
        losses_poes = scientific.event_based(
            loss_matrix[0], 120, 30, curve_resolution=4)

        first_curve_integral = scientific.average_loss(losses_poes)

        self.assertAlmostEqual(0.500993631, first_curve_integral)

        wf = workflows.ProbabilisticEventBased(
            'PGA', 'SOME-TAXONOMY',
            vulnerability_functions={self.loss_type: vf},
            risk_investigation_time=50,
            hazard_investigation_time=50,
            ses_per_logic_tree_path=200,
            number_of_logic_tree_samples=0,
            loss_curve_resolution=4,
            conditional_loss_poes=[0.1, 0.5, 0.9],
            insured_losses=False
            )
        out = wf(self.loss_type, assets, gmvs, epsilons, [1, 2, 3, 4, 5])
        self.assert_similar(
            out.event_loss_table,
            {1: 16.246646231503398,
             2: 15.613885199116158,
             3: 15.669704465134854,
             4: 16.241922530992454,
             5: 16.010104452203464,
             })
Example #23
    def test_mean_based_with_perfect_correlation(self):
        # This is a regression test. Data has not been checked
        vf = (
            scientific.VulnerabilityFunction(
                'SOME-TAXONOMY', 'PGA',
                [0.001, 0.2, 0.3, 0.5, 0.7],
                [0.01, 0.1, 0.2, 0.4, 0.8],
                [0.01, 0.02, 0.02, 0.01, 0.03]))

        gmvs = [[10., 20., 30., 40., 50.],
                [1., 2., 3., 4., 5.]]

        epsilons = scientific.make_epsilons(gmvs, seed=1, correlation=1)
        loss_matrix = vf.apply_to(gmvs, epsilons)
        losses_poes = scientific.event_based(loss_matrix[0], 120, 30, 4)

        first_curve_integral = scientific.average_loss(losses_poes)

        self.assertAlmostEqual(0.483041416, first_curve_integral)

        wf = workflows.ProbabilisticEventBased(
            'PGA', 'SOME-TAXONOMY',
            vulnerability_functions={self.loss_type: vf},
            risk_investigation_time=50,
            hazard_investigation_time=50,
            ses_per_logic_tree_path=200,
            number_of_logic_tree_samples=0,
            loss_curve_resolution=4,
            conditional_loss_poes=[0.1, 0.5, 0.9],
            insured_losses=False
            )
        out = wf(self.loss_type, assets, gmvs, epsilons, [1, 2, 3, 4, 5])
        self.assert_similar(
            out.event_loss_table,
            {1: 15.232320555463319,
             2: 16.248173683693864,
             3: 15.583030510462981,
             4: 15.177382760499968,
             5: 15.840499250058254,
             })
Example #24
    def test_mean_based_with_no_correlation(self):
        # This is a regression test. Data has not been checked
        vf = (scientific.VulnerabilityFunction('SOME-TAXONOMY', 'PGA',
                                               [0.001, 0.2, 0.3, 0.5, 0.7],
                                               [0.01, 0.1, 0.2, 0.4, 0.8],
                                               [0.01, 0.02, 0.02, 0.01, 0.03]))
        gmvs = numpy.array([[10., 20., 30., 40., 50.], [1., 2., 3., 4., 5.]])

        epsilons = scientific.make_epsilons(gmvs, seed=1, correlation=0)
        loss_matrix = vf.apply_to(gmvs, epsilons)
        losses_poes = scientific.event_based(loss_matrix[0],
                                             120,
                                             30,
                                             curve_resolution=4)

        first_curve_integral = scientific.average_loss(losses_poes)

        self.assertAlmostEqual(0.500993631, first_curve_integral)

        wf = workflows.ProbabilisticEventBased(
            'PGA',
            'SOME-TAXONOMY',
            vulnerability_functions={self.loss_type: vf},
            investigation_time=50,
            risk_investigation_time=50,
            ses_per_logic_tree_path=200,
            number_of_logic_tree_samples=0,
            loss_curve_resolution=4,
            conditional_loss_poes=[0.1, 0.5, 0.9],
            insured_losses=False)
        out = wf(self.loss_type, assets, gmvs, epsilons, [1, 2, 3, 4, 5])
        self.assert_similar(
            out.event_loss_table, {
                1: 16.246646231503398,
                2: 15.613885199116158,
                3: 15.669704465134854,
                4: 16.241922530992454,
                5: 16.010104452203464,
            })
Example #25
    def test_insured_loss_mean_based(self):
        vulnerability_function_rm = (
            scientific.VulnerabilityFunction(
                [0.001, 0.2, 0.3, 0.5, 0.7], [0.01, 0.1, 0.2, 0.4, 0.8],
                [0.0, 0.0, 0.0, 0.0, 0.0], "LN"))

        vulnerability_function_rc = (
            scientific.VulnerabilityFunction(
                [0.001, 0.2, 0.3, 0.5, 0.7], [0.0035, 0.07, 0.14, 0.28, 0.56],
                [0.0, 0.0, 0.0, 0.0, 0.0], "LN"))

        calculator_rm = api.ProbabilisticEventBased(
            vulnerability_function_rm, time_span=50, tses=50,
            curve_resolution=20)

        calculator_rc = api.ProbabilisticEventBased(
            vulnerability_function_rc, time_span=50, tses=50,
            curve_resolution=20)

        loss_ratios_rm, _curves_rm = calculator_rm(gmf[0:2])
        loss_ratios_rc, [_curve_rc] = calculator_rc([gmf[2]])

        values = [3000, 1000, 2000]
        insured_limits = [1250., 40., 500.]
        deductibles = [40, 13, 15]

        insured_losses = [scientific.event_based(
            scientific.insured_losses(
                loss_ratios, values[i], deductibles[i], insured_limits[i]),
            50, 50, 20)
            for i, loss_ratios in enumerate(loss_ratios_rm + loss_ratios_rc)]

        for i, insured_loss_curve in enumerate(insured_losses):
            numpy.testing.assert_allclose(
                il.expected_poes[i], insured_loss_curve.ordinates, rtol=10E-5)

            numpy.testing.assert_allclose(
                il.expected_losses[i],
                insured_loss_curve.abscissae, rtol=10E-5)
Example #26
    def test_mean_based_with_perfect_correlation(self):
        # This is a regression test. Data has not been checked
        vf = (scientific.VulnerabilityFunction('SOME-TAXONOMY', 'PGA',
                                               [0.001, 0.2, 0.3, 0.5, 0.7],
                                               [0.01, 0.1, 0.2, 0.4, 0.8],
                                               [0.01, 0.02, 0.02, 0.01, 0.03]))

        gmvs = [[10., 20., 30., 40., 50.], [1., 2., 3., 4., 5.]]

        epsilons = scientific.make_epsilons(gmvs, seed=1, correlation=1)
        loss_matrix = vf.apply_to(gmvs, epsilons)
        losses_poes = scientific.event_based(loss_matrix[0], 120, 30, 4)

        first_curve_integral = scientific.average_loss(losses_poes)

        self.assertAlmostEqual(0.483041416, first_curve_integral)

        wf = workflows.ProbabilisticEventBased(
            'PGA',
            'SOME-TAXONOMY',
            vulnerability_functions={self.loss_type: vf},
            investigation_time=50,
            risk_investigation_time=50,
            ses_per_logic_tree_path=200,
            number_of_logic_tree_samples=0,
            loss_curve_resolution=4,
            conditional_loss_poes=[0.1, 0.5, 0.9],
            insured_losses=False)
        out = wf(self.loss_type, assets, gmvs, epsilons, [1, 2, 3, 4, 5])
        self.assert_similar(
            out.event_loss_table, {
                1: 15.232320555463319,
                2: 16.248173683693864,
                3: 15.583030510462981,
                4: 15.177382760499968,
                5: 15.840499250058254,
            })
Example #27
    def test_zero_curve(self):
        expected = [0.] * 100
        losses, poes = scientific.event_based(expected, 50, 50, 11)

        numpy.testing.assert_allclose([0.] * 11, losses)
        numpy.testing.assert_allclose([0.] * 11, poes, atol=1E-10)
Example #28
    def post_process(self):
        """
          Compute aggregate loss curves and event loss tables
        """
        oq = self.oqparam
        tses = oq.investigation_time * oq.ses_per_logic_tree_path
        with self.monitor('post processing', autoflush=True):
            inserter = writer.CacheInserter(models.EventLossData,
                                            max_cache_size=10000)
            for (loss_type, out_id), event_loss_table in self.acc.items():
                if out_id:  # values for individual realizations
                    hazard_output = models.Output.objects.get(pk=out_id)
                    event_loss = models.EventLoss.objects.get(
                        output__oq_job=self.job,
                        output__output_type='event_loss',
                        loss_type=loss_type, hazard_output=hazard_output)
                    if isinstance(hazard_output.output_container,
                                  models.SESCollection):
                        ses_coll = hazard_output.output_container
                        rupture_ids = ses_coll.get_ruptures().values_list(
                            'id', flat=True)
                    else:  # extract the SES collection from the Gmf
                        rupture_ids = models.SESRupture.objects.filter(
                            rupture__ses_collection__trt_model__lt_model=
                            hazard_output.output_container.
                            lt_realization.lt_model).values_list(
                            'id', flat=True)
                    for rupture_id in rupture_ids:
                        if rupture_id in event_loss_table:
                            inserter.add(
                                models.EventLossData(
                                    event_loss_id=event_loss.id,
                                    rupture_id=rupture_id,
                                    aggregate_loss=event_loss_table[
                                        rupture_id]))
                    inserter.flush()

                    aggregate_losses = [
                        event_loss_table[rupture_id]
                        for rupture_id in rupture_ids
                        if rupture_id in event_loss_table]

                    if aggregate_losses:
                        aggregate_loss = scientific.event_based(
                            aggregate_losses, tses=tses,
                            time_span=oq.investigation_time,
                            curve_resolution=oq.loss_curve_resolution)

                        models.AggregateLossCurveData.objects.create(
                            loss_curve=models.LossCurve.objects.create(
                                aggregate=True, insured=False,
                                hazard_output=hazard_output,
                                loss_type=loss_type,
                                output=models.Output.objects.create_output(
                                    self.job,
                                    "aggregate loss curves. "
                                    "loss_type=%s hazard=%s" % (
                                        loss_type, hazard_output),
                                    "agg_loss_curve")),
                            losses=aggregate_loss[0],
                            poes=aggregate_loss[1],
                            average_loss=scientific.average_loss(
                                aggregate_loss),
                            stddev_loss=numpy.std(aggregate_losses))
Example #29
    def post_process(self):
        """
          Compute aggregate loss curves and event loss tables
        """
        with EnginePerformanceMonitor('post processing', self.job.id):

            time_span, tses = self.hazard_times()
            for loss_type, event_loss_table in self.event_loss_tables.items():
                for hazard_output in self.rc.hazard_outputs():

                    event_loss = models.EventLoss.objects.create(
                        output=models.Output.objects.create_output(
                            self.job,
                            "Event Loss Table. type=%s, hazard=%s" % (
                                loss_type, hazard_output.id),
                            "event_loss"),
                        loss_type=loss_type,
                        hazard_output=hazard_output)
                    inserter = writer.CacheInserter(models.EventLossData, 9999)

                    ruptures = models.SESRupture.objects.filter(
                        ses__ses_collection__lt_realization=
                        hazard_output.output_container.lt_realization)

                    for rupture in ruptures:
                        if rupture.id in event_loss_table:
                            inserter.add(
                                models.EventLossData(
                                    event_loss_id=event_loss.id,
                                    rupture_id=rupture.id,
                                    aggregate_loss=event_loss_table[
                                        rupture.id]))
                    inserter.flush()

                    aggregate_losses = [
                        event_loss_table[rupture.id]
                        for rupture in ruptures
                        if rupture.id in event_loss_table]

                    if aggregate_losses:
                        aggregate_loss_losses, aggregate_loss_poes = (
                            scientific.event_based(
                                aggregate_losses, tses=tses,
                                time_span=time_span,
                                curve_resolution=self.rc.loss_curve_resolution
                            ))

                        models.AggregateLossCurveData.objects.create(
                            loss_curve=models.LossCurve.objects.create(
                                aggregate=True, insured=False,
                                hazard_output=hazard_output,
                                loss_type=loss_type,
                                output=models.Output.objects.create_output(
                                    self.job,
                                    "aggregate loss curves. "
                                    "loss_type=%s hazard=%s" % (
                                        loss_type, hazard_output),
                                    "agg_loss_curve")),
                            losses=aggregate_loss_losses,
                            poes=aggregate_loss_poes,
                            average_loss=scientific.average_loss(
                                aggregate_loss_losses, aggregate_loss_poes),
                            stddev_loss=numpy.std(aggregate_losses))
Example #30
    def post_process(self):
        """
          Compute aggregate loss curves and event loss tables
        """
        with self.monitor('post processing'):
            inserter = writer.CacheInserter(models.EventLossData,
                                            max_cache_size=10000)
            time_span, tses = self.hazard_times()
            for (loss_type, out_id), event_loss_table in self.acc.items():
                if out_id:  # values for individual realizations
                    hazard_output = models.Output.objects.get(pk=out_id)
                    event_loss = models.EventLoss.objects.get(
                        output__oq_job=self.job,
                        output__output_type='event_loss',
                        loss_type=loss_type, hazard_output=hazard_output)
                    if isinstance(hazard_output.output_container,
                                  models.SESCollection):
                        ses_coll = hazard_output.output_container
                        rupture_ids = ses_coll.get_ruptures().values_list(
                            'id', flat=True)
                    else:  # extract the SES collection from the Gmf
                        rupture_ids = models.SESRupture.objects.filter(
                            rupture__ses_collection__trt_model__lt_model=
                            hazard_output.output_container.
                            lt_realization.lt_model).values_list(
                            'id', flat=True)
                    for rupture_id in rupture_ids:
                        if rupture_id in event_loss_table:
                            inserter.add(
                                models.EventLossData(
                                    event_loss_id=event_loss.id,
                                    rupture_id=rupture_id,
                                    aggregate_loss=event_loss_table[
                                        rupture_id]))
                    inserter.flush()

                    aggregate_losses = [
                        event_loss_table[rupture_id]
                        for rupture_id in rupture_ids
                        if rupture_id in event_loss_table]

                    if aggregate_losses:
                        aggregate_loss_losses, aggregate_loss_poes = (
                            scientific.event_based(
                                aggregate_losses, tses=tses,
                                time_span=time_span,
                                curve_resolution=self.rc.loss_curve_resolution
                            ))

                        models.AggregateLossCurveData.objects.create(
                            loss_curve=models.LossCurve.objects.create(
                                aggregate=True, insured=False,
                                hazard_output=hazard_output,
                                loss_type=loss_type,
                                output=models.Output.objects.create_output(
                                    self.job,
                                    "aggregate loss curves. "
                                    "loss_type=%s hazard=%s" % (
                                        loss_type, hazard_output),
                                    "agg_loss_curve")),
                            losses=aggregate_loss_losses,
                            poes=aggregate_loss_poes,
                            average_loss=scientific.average_loss(
                                aggregate_loss_losses, aggregate_loss_poes),
                            stddev_loss=numpy.std(aggregate_losses))
Example #31
    def post_process(self):
        """
          Compute aggregate loss curves and event loss tables
        """
        with EnginePerformanceMonitor('post processing', self.job.id):

            time_span, tses = self.hazard_times()
            for loss_type, event_loss_table in self.event_loss_tables.items():
                for hazard_output in self.rc.hazard_outputs():

                    event_loss = models.EventLoss.objects.create(
                        output=models.Output.objects.create_output(
                            self.job, "Event Loss Table. type=%s, hazard=%s" %
                            (loss_type, hazard_output.id), "event_loss"),
                        loss_type=loss_type,
                        hazard_output=hazard_output)
                    inserter = writer.CacheInserter(models.EventLossData, 9999)

                    rupture_ids = models.SESRupture.objects.filter(
                        ses__ses_collection__lt_realization=hazard_output.
                        output_container.lt_realization).values_list('id',
                                                                     flat=True)

                    for rupture_id in rupture_ids:
                        if rupture_id in event_loss_table:
                            inserter.add(
                                models.EventLossData(
                                    event_loss_id=event_loss.id,
                                    rupture_id=rupture_id,
                                    aggregate_loss=event_loss_table[rupture_id]
                                ))
                    inserter.flush()

                    aggregate_losses = [
                        event_loss_table[rupture_id]
                        for rupture_id in rupture_ids
                        if rupture_id in event_loss_table
                    ]

                    if aggregate_losses:
                        aggregate_loss_losses, aggregate_loss_poes = (
                            scientific.event_based(
                                aggregate_losses,
                                tses=tses,
                                time_span=time_span,
                                curve_resolution=self.rc.loss_curve_resolution)
                        )

                        models.AggregateLossCurveData.objects.create(
                            loss_curve=models.LossCurve.objects.create(
                                aggregate=True,
                                insured=False,
                                hazard_output=hazard_output,
                                loss_type=loss_type,
                                output=models.Output.objects.create_output(
                                    self.job, "aggregate loss curves. "
                                    "loss_type=%s hazard=%s" %
                                    (loss_type, hazard_output),
                                    "agg_loss_curve")),
                            losses=aggregate_loss_losses,
                            poes=aggregate_loss_poes,
                            average_loss=scientific.average_loss(
                                aggregate_loss_losses, aggregate_loss_poes),
                            stddev_loss=numpy.std(aggregate_losses))
Example #32
    def test_zero_curve(self):
        expected = [0.] * 100
        losses, poes = scientific.event_based(expected, 1, 11)

        numpy.testing.assert_allclose([0.] * 11, losses)
        numpy.testing.assert_allclose([0.] * 11, poes, atol=1E-10)
Example #33
def event_based(job_id, hazard,
                seed, vulnerability_function,
                output_containers,
                conditional_loss_poes, insured_losses,
                time_span, tses,
                loss_curve_resolution, asset_correlation,
                hazard_montecarlo_p):
    """
    Celery task for the event based risk calculator.

    :param job_id: the id of the current
        :class:`openquake.engine.db.models.OqJob`
    :param dict hazard:
      A dictionary mapping IDs of
      :class:`openquake.engine.db.models.Output` (with output_type set
      to 'gmf_collection') to a tuple where the first element is an
      instance of
      :class:`..hazard_getters.GroundMotionValuesGetter`,
      and the second element is the corresponding weight.
    :param seed:
      the seed used to initialize the rng
    :param dict output_containers: a dictionary mapping hazard Output
      ID to a tuple (a, b, c, d, e, f) where a is the ID of the
      :class:`openquake.engine.db.models.LossCurve` output container used to
      store the computed loss curves; b is the dictionary poe->ID of
      the :class:`openquake.engine.db.models.LossMap` output container used
      to store the computed loss maps; c and d are the IDs of the
      containers for the mean and quantile loss curves; e is the same
      as a but for insured losses; f is the ID of the
      :class:`openquake.engine.db.models.AggregateLossCurve` output container
      used to store the computed aggregate loss curve
    :param conditional_loss_poes:
      The poes taken into account to compute the loss maps
    :param bool insured_losses: True if insured losses should be computed
    :param time_span: the time span considered
    :param tses: time of the stochastic event set
    :param loss_curve_resolution: the curve resolution, i.e. the
    number of points which define the loss curves
    :param float asset_correlation: a number ranging from 0 to 1
    representing the correlation between the generated loss ratios
    """

    loss_ratio_curves = OrderedDict()
    event_loss_table = dict()

    for hazard_output_id, hazard_data in hazard.items():
        hazard_getter, _ = hazard_data

        (loss_curve_id, loss_map_ids,
         mean_loss_curve_id, quantile_loss_curve_ids,
         insured_curve_id, aggregate_loss_curve_id) = (
             output_containers[hazard_output_id])

        # FIXME(lp). We should not pass the exact same seed for
        # different hazard
        calculator = api.ProbabilisticEventBased(
            vulnerability_function,
            curve_resolution=loss_curve_resolution,
            time_span=time_span,
            tses=tses,
            seed=seed,
            correlation=asset_correlation)

        with logs.tracing('getting input data from db'):
            assets, gmvs_ruptures, missings = hazard_getter()

        if len(assets):
            ground_motion_values = numpy.array(gmvs_ruptures)[:, 0]
            rupture_id_matrix = numpy.array(gmvs_ruptures)[:, 1]
        else:
            # we are relying on the fact that the hazard getters in
            # this task either all return some results or all return
            # an empty result set.
            logs.LOG.info("Exit from task as no asset could be processed")
            base.signal_task_complete(
                job_id=job_id,
                event_loss_table=dict(),
                num_items=len(missings))
            return

        with logs.tracing('computing risk'):
            loss_ratio_matrix, loss_ratio_curves[hazard_output_id] = (
                calculator(ground_motion_values))

        with logs.tracing('writing results'):
            with db.transaction.commit_on_success(using='reslt_writer'):
                for i, loss_ratio_curve in enumerate(
                        loss_ratio_curves[hazard_output_id]):
                    asset = assets[i]

                    # loss curves
                    general.write_loss_curve(
                        loss_curve_id, asset, loss_ratio_curve)

                    # loss maps
                    for poe in conditional_loss_poes:
                        general.write_loss_map_data(
                            loss_map_ids[poe], asset,
                            scientific.conditional_loss_ratio(
                                loss_ratio_curve, poe))

                    # insured losses
                    if insured_losses:
                        insured_loss_curve = scientific.event_based(
                            scientific.insured_losses(
                                loss_ratio_matrix[i],
                                asset.value,
                                asset.deductible,
                                asset.ins_limit),
                            tses,
                            time_span,
                            loss_curve_resolution)

                        insured_loss_curve.abscissae = (
                            insured_loss_curve.abscissae / asset.value)
                        general.write_loss_curve(
                            insured_curve_id, asset, insured_loss_curve)

                # update the event loss table of this task
                for i, asset in enumerate(assets):
                    for j, rupture_id in enumerate(rupture_id_matrix[i]):
                        loss = loss_ratio_matrix[i][j] * asset.value
                        event_loss_table[rupture_id] = (
                            event_loss_table.get(rupture_id, 0) + loss)

                # update the aggregate losses
                aggregate_losses = sum(
                    loss_ratio_matrix[i] * asset.value
                    for i, asset in enumerate(assets))
                general.update_aggregate_losses(
                    aggregate_loss_curve_id, aggregate_losses)

    # compute mean and quantile loss curves if multiple hazard
    # realizations are computed
    if len(hazard) > 1 and (mean_loss_curve_id or quantile_loss_curve_ids):
        weights = [data[1] for _, data in hazard.items()]

        with logs.tracing('writing curve statistics'):
            with db.transaction.commit_on_success(using='reslt_writer'):
                loss_ratio_curve_matrix = loss_ratio_curves.values()

                # here we are relying on the fact that assets do not
                # change across different logic tree realizations (as
                # the hazard grid does not change, so the hazard
                # getters always return the same assets)
                for i, asset in enumerate(assets):
                    general.curve_statistics(
                        asset,
                        loss_ratio_curve_matrix[i],
                        weights,
                        mean_loss_curve_id,
                        quantile_loss_curve_ids,
                        hazard_montecarlo_p,
                        assume_equal="image")

    base.signal_task_complete(job_id=job_id,
                              num_items=len(assets) + len(missings),
                              event_loss_table=event_loss_table)
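The event loss table updated in the task above is just a dict mapping each rupture ID to the total loss it caused over all assets. A self-contained rerun of that accumulation with exactly representable sample values (the asset values and rupture IDs are made up for the example):

import numpy

event_loss_table = {}
loss_ratio_matrix = numpy.array([[0.25, 0.0], [0.5, 0.125]])
asset_values = [3000.0, 1000.0]
rupture_id_matrix = [[7, 9], [7, 9]]
for i, value in enumerate(asset_values):
    for j, rupture_id in enumerate(rupture_id_matrix[i]):
        loss = loss_ratio_matrix[i][j] * value
        event_loss_table[rupture_id] = (
            event_loss_table.get(rupture_id, 0) + loss)
assert event_loss_table == {7: 1250.0, 9: 125.0}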