# Example #1
# Score: 0
def event_based_bcr(job_id, hazard, seed,
                    vulnerability_function, vulnerability_function_retrofitted,
                    output_containers, time_span, tses,
                    loss_curve_resolution, asset_correlation,
                    asset_life_expectancy, interest_rate):
    """
    Celery task for the BCR risk calculator based on the event based
    calculator.

    Instantiates risklib calculators, computes the Benefit-Cost Ratio
    (BCR) per asset and stores results to db in a single transaction.

    :param int job_id:
        ID of the currently running job.
    :param dict hazard:
        A dictionary mapping IDs of
        :class:`openquake.engine.db.models.Output` (with output_type set
        to 'gmf_collection') to a tuple where the first element is a list
        of list (one for each asset) with the ground motion values used by
        the calculation, and the second element is the corresponding weight.
    :param output_containers:
        A dictionary mapping hazard Output ID to a tuple with only the ID
        of the :class:`openquake.engine.db.models.BCRDistribution` output
        container used to store the computed bcr distribution.
    :param float time_span:
        Time Span of the hazard calculation.
    :param float tses:
        Time of the Stochastic Event Set.
    :param int loss_curve_resolution:
        Resolution of the computed loss curves (number of points).
    :param int seed:
        Seed used to generate random values.
    :param float asset_correlation:
        asset correlation (0 uncorrelated, 1 perfectly correlated).
    :param float interest_rate:
        The interest rate used in the Cost Benefit Analysis.
    :param float asset_life_expectancy:
        The life expectancy used for every asset.
    """
    # Initialize so the final signal_task_complete call cannot raise
    # NameError when `hazard` is an empty mapping.
    assets, missings = [], []

    for hazard_output_id, hazard_data in hazard.items():
        hazard_getter, _ = hazard_data
        (bcr_distribution_id,) = output_containers[hazard_output_id]

        # FIXME(lp). We should not pass the exact same seed for
        # different hazard
        # NOTE: the calculators are (re)built inside the loop on purpose;
        # constructing them resets their internal random state to `seed`.
        calc_original = api.ProbabilisticEventBased(
            vulnerability_function, curve_resolution=loss_curve_resolution,
            time_span=time_span, tses=tses,
            seed=seed, correlation=asset_correlation)

        calc_retrofitted = api.ProbabilisticEventBased(
            vulnerability_function_retrofitted,
            curve_resolution=loss_curve_resolution,
            time_span=time_span, tses=tses,
            seed=seed, correlation=asset_correlation)

        with logs.tracing('getting hazard'):
            assets, gmvs_ruptures, missings = hazard_getter()
            if assets:
                # Keep only the ground motion values; drop the ruptures
                # (second column).
                ground_motion_values = numpy.array(gmvs_ruptures)[:, 0]
            else:
                # we are relying on the fact that the hazard_getters in
                # this task either all return some results or all return
                # an empty result set.
                logs.LOG.info("Exit from task as no asset could be processed")
                base.signal_task_complete(job_id=job_id,
                                          num_items=len(missings))
                return

        with logs.tracing('computing risk'):
            _, original_loss_curves = calc_original(ground_motion_values)
            _, retrofitted_loss_curves = calc_retrofitted(ground_motion_values)

            # Expected annual loss per asset, before and after retrofitting
            # (one loss curve per asset — assumed by the original indexing).
            eal_original = [scientific.mean_loss(*curve.xy)
                            for curve, _ in zip(original_loss_curves, assets)]

            eal_retrofitted = [
                scientific.mean_loss(*curve.xy)
                for curve, _ in zip(retrofitted_loss_curves, assets)]

            bcr_results = [
                scientific.bcr(
                    eal_orig, eal_retro,
                    interest_rate, asset_life_expectancy,
                    asset.value, asset.retrofitting_cost)
                for eal_orig, eal_retro, asset
                in zip(eal_original, eal_retrofitted, assets)]

        with logs.tracing('writing results'):
            with transaction.commit_on_success(using='reslt_writer'):
                for i, asset in enumerate(assets):
                    general.write_bcr_distribution(
                        bcr_distribution_id, asset,
                        eal_original[i], eal_retrofitted[i], bcr_results[i])

    # num_items counts both processed assets and assets with no hazard data,
    # taken from the last hazard output (all getters see the same assets).
    base.signal_task_complete(job_id=job_id,
                              num_items=len(assets) + len(missings))
# Example #2
# Score: 0
# File: core.py  Project: 4x/oq-engine
def classical_bcr(
    job_id,
    hazard,
    vulnerability_function,
    vulnerability_function_retrofitted,
    output_containers,
    lrem_steps_per_interval,
    asset_life_expectancy,
    interest_rate,
):
    """
    Celery task for the BCR risk calculator based on the classical
    calculator.

    Instantiates risklib calculators, computes BCR and stores the
    results to db in a single transaction.

    :param int job_id:
      ID of the currently running job
    :param dict hazard:
      A dictionary mapping IDs of
      :class:`openquake.engine.db.models.Output` (with output_type set
      to 'hazard_curve') to a tuple where the first element is an instance of
      :class:`..hazard_getters.HazardCurveGetter`, and the second element
      is the corresponding weight.
    :param output_containers:
      A dictionary mapping hazard Output ID to a tuple with only the ID
      of the :class:`openquake.engine.db.models.BCRDistribution` output
      container used to store the computed bcr distribution
    :param int lrem_steps_per_interval:
      Steps per interval used to compute the Loss Ratio Exceedance matrix
    :param float interest_rate:
      The interest rate used in the Cost Benefit Analysis
    :param float asset_life_expectancy:
      The life expectancy used for every asset
    """
    # The classical calculators are deterministic, so they can safely be
    # built once and reused for every hazard output.
    calc_original = api.Classical(vulnerability_function, lrem_steps_per_interval)
    calc_retrofitted = api.Classical(vulnerability_function_retrofitted, lrem_steps_per_interval)

    # Initialize so the final signal_task_complete call cannot raise
    # NameError when `hazard` is an empty mapping.
    assets, missings = [], []

    for hazard_output_id, hazard_data in hazard.items():
        hazard_getter, _ = hazard_data
        (bcr_distribution_id,) = output_containers[hazard_output_id]

        with logs.tracing("getting hazard"):
            assets, hazard_curves, missings = hazard_getter()

        with logs.tracing("computing original losses"):
            original_loss_curves = calc_original(hazard_curves)
            retrofitted_loss_curves = calc_retrofitted(hazard_curves)

            # Expected annual loss per asset, before and after retrofitting
            # (one loss curve per asset — assumed by the original indexing).
            eal_original = [scientific.mean_loss(*curve.xy)
                            for curve, _ in zip(original_loss_curves, assets)]

            eal_retrofitted = [scientific.mean_loss(*curve.xy)
                               for curve, _ in zip(retrofitted_loss_curves, assets)]

            bcr_results = [
                scientific.bcr(
                    eal_orig,
                    eal_retro,
                    interest_rate,
                    asset_life_expectancy,
                    asset.value,
                    asset.retrofitting_cost,
                )
                for eal_orig, eal_retro, asset in zip(eal_original, eal_retrofitted, assets)
            ]

        with logs.tracing("writing results"):
            with transaction.commit_on_success(using="reslt_writer"):
                for i, asset in enumerate(assets):
                    general.write_bcr_distribution(
                        bcr_distribution_id, asset, eal_original[i], eal_retrofitted[i], bcr_results[i]
                    )

    # num_items counts both processed assets and assets with no hazard data,
    # taken from the last hazard output (all getters see the same assets).
    base.signal_task_complete(job_id=job_id, num_items=len(assets) + len(missings))