Example #1
    def __init__(self, job):
        super(BaseHazardCalculator, self).__init__(job)
        # a dictionary trt_model_id -> num_ruptures
        self.num_ruptures = collections.defaultdict(int)
        # a dictionary (trt_model_id, gsim) -> poes
        self.acc = general.AccumDict()
        self.hc = models.oqparam(self.job.id)
        self.mean_hazard_curves = getattr(
            self.hc, 'mean_hazard_curves', None)
        self.quantile_hazard_curves = getattr(
            self.hc, 'quantile_hazard_curves', ())
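
The constructor above seeds two accumulators: a plain `collections.defaultdict(int)` for rupture counts and an `AccumDict` for poes. A minimal sketch of the accumulation pattern, with a toy `AccumDict` showing the merge-by-key `+=` semantics (an illustration, not the engine's implementation):

import collections

class AccumDict(dict):
    """Toy dict supporting `+=` merge by key (illustration only)."""
    def __iadd__(self, other):
        for key, value in other.items():
            self[key] = self[key] + value if key in self else value
        return self

num_ruptures = collections.defaultdict(int)
num_ruptures[1] += 10                      # trt_model_id -> num_ruptures

acc = AccumDict()
acc += {(1, 'BooreAtkinson2008'): [0.1]}
acc += {(1, 'BooreAtkinson2008'): [0.2]}   # values merged with +
print(acc)  # {(1, 'BooreAtkinson2008'): [0.1, 0.2]}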
Example #2
def list_outputs(job_id, full=True):
    """
    List the outputs for a given
    :class:`~openquake.engine.db.models.OqJob`.

    :param job_id:
        ID of a calculation.
    :param bool full:
        If True, produce a full listing, otherwise a short version
    """
    outputs = get_outputs(job_id)
    if models.oqparam(job_id).calculation_mode == 'scenario':
        # ignore SES output
        outputs = outputs.filter(output_type='gmf_scenario')
    print_outputs_summary(outputs, full)
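
For scenario calculations only the GMF output matters, so the SES record is filtered out. A plain-Python stand-in for the queryset filter above (the `Output` class here is a hypothetical simplification of the Django model):

class Output(object):
    def __init__(self, output_type, display_name):
        self.output_type = output_type
        self.display_name = display_name

outputs = [Output('ses', 'SES Collection'), Output('gmf_scenario', 'GMF')]
calculation_mode = 'scenario'
if calculation_mode == 'scenario':
    # keep only the ground motion fields, as in the queryset filter above
    outputs = [o for o in outputs if o.output_type == 'gmf_scenario']
print([o.display_name for o in outputs])  # ['GMF']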
Example #3
def gmfs(job_id, ses_ruptures, sitecol, imts, gmf_id):
    """
    :param int job_id: the current job ID
    :param ses_ruptures: a list of `SESRupture` instances
    :param sitecol: a `SiteCollection` instance
    :param imts: a list of hazardlib IMT instances
    :param int gmf_id: the ID of a `Gmf` instance
    """
    hc = models.oqparam(job_id)
    gsim = AVAILABLE_GSIMS[hc.gsim]()  # instantiate the GSIM class
    correlation_model = models.get_correl_model(
        models.OqJob.objects.get(pk=job_id))

    cache = collections.defaultdict(list)  # (site_id, imt) -> [(gmv, rup_id)]
    inserter = writer.CacheInserter(models.GmfData, 1000)
    # insert GmfData in blocks of 1000 sites

    # NB: ses_ruptures is a non-empty list produced by the block_splitter
    rupture = ses_ruptures[0].rupture  # ProbabilisticRupture instance
    with EnginePerformanceMonitor('computing gmfs', job_id, gmfs):
        gmf = GmfComputer(rupture, sitecol, imts, [gsim],
                          getattr(hc, 'truncation_level', None),
                          correlation_model)
        gname = gsim.__class__.__name__
        for ses_rup in ses_ruptures:
            for (gname, imt), gmvs in gmf.compute(ses_rup.seed):
                for site_id, gmv in zip(sitecol.sids, gmvs):
                    # float may be needed below to convert 1x1 matrices
                    cache[site_id, imt].append((gmv, ses_rup.id))

    with EnginePerformanceMonitor('saving gmfs', job_id, gmfs):
        for (site_id, imt_str), data in cache.iteritems():
            imt = from_string(imt_str)
            gmvs, rup_ids = zip(*data)
            inserter.add(
                models.GmfData(
                    gmf_id=gmf_id,
                    task_no=0,
                    imt=imt[0],
                    sa_period=imt[1],
                    sa_damping=imt[2],
                    site_id=site_id,
                    rupture_ids=rup_ids,
                    gmvs=gmvs))
        inserter.flush()
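
The task buffers (gmv, rupture_id) pairs under a (site_id, imt) key and later splits each list into two parallel sequences with zip(*data). The pattern in isolation:

import collections

cache = collections.defaultdict(list)  # (site_id, imt) -> [(gmv, rup_id)]
cache[1, 'PGA'].append((0.12, 101))
cache[1, 'PGA'].append((0.34, 102))

for (site_id, imt), data in cache.items():
    gmvs, rup_ids = zip(*data)  # transpose the pairs into two tuples
    print('%s %s %s %s' % (site_id, imt, gmvs, rup_ids))
    # 1 PGA (0.12, 0.34) (101, 102)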
Example #4
    def create_ruptures(self):
        oqparam = models.oqparam(self.job.id)
        self.imts = map(
            from_string, sorted(oqparam.intensity_measure_types_and_levels))
        self.rupture = get_rupture(oqparam)

        # check filtering
        trunc_level = getattr(oqparam, 'truncation_level', None)
        maximum_distance = oqparam.maximum_distance
        self.sites = filters.filter_sites_by_distance_to_rupture(
            self.rupture, maximum_distance, self.site_collection)
        if self.sites is None:
            raise RuntimeError(
                'All sites were filtered out! '
                'maximum_distance=%s km' % maximum_distance)

        # create ses output
        output = models.Output.objects.create(
            oq_job=self.job,
            display_name='SES Collection',
            output_type='ses')
        self.ses_coll = models.SESCollection.create(output=output)

        # create gmf output
        output = models.Output.objects.create(
            oq_job=self.job,
            display_name="GMF",
            output_type="gmf_scenario")
        self.gmf = models.Gmf.objects.create(output=output)

        with self.monitor('saving ruptures'):
            self.tags = ['scenario-%010d' % i for i in xrange(
                oqparam.number_of_ground_motion_fields)]
            _, self.rupids, self.seeds = create_db_ruptures(
                self.rupture, self.ses_coll, self.tags,
                self.hc.random_seed)

        correlation_model = models.get_correl_model(
            models.OqJob.objects.get(pk=self.job.id))
        gsim = AVAILABLE_GSIMS[oqparam.gsim]()
        self.computer = GmfComputer(
            self.rupture, self.site_collection, self.imts, gsim,
            trunc_level, correlation_model)
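
The scenario tags above are zero-padded to ten digits so that lexicographic and numeric order coincide. A quick check of the format:

number_of_ground_motion_fields = 3
tags = ['scenario-%010d' % i for i in range(number_of_ground_motion_fields)]
print(tags)
# ['scenario-0000000000', 'scenario-0000000001', 'scenario-0000000002']
assert tags == sorted(tags)  # padding keeps string order == numeric order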
Example #5
    def create_ruptures(self):
        oqparam = models.oqparam(self.job.id)
        self.imts = map(from_string, oqparam.imtls)
        self.rupture = readinput.get_rupture(oqparam)

        # check filtering
        trunc_level = oqparam.truncation_level
        maximum_distance = oqparam.maximum_distance
        self.sites = filters.filter_sites_by_distance_to_rupture(
            self.rupture, maximum_distance, self.site_collection)
        if self.sites is None:
            raise RuntimeError(
                'All sites were filtered out! '
                'maximum_distance=%s km' % maximum_distance)

        # create ses output
        output = models.Output.objects.create(
            oq_job=self.job,
            display_name='SES Collection',
            output_type='ses')
        self.ses_coll = models.SESCollection.create(output=output)

        # create gmf output
        output = models.Output.objects.create(
            oq_job=self.job,
            display_name="GMF",
            output_type="gmf_scenario")
        self.gmf = models.Gmf.objects.create(output=output)

        with self.monitor('saving ruptures', autoflush=True):
            self.tags = ['scenario-%010d' % i for i in xrange(
                oqparam.number_of_ground_motion_fields)]
            _, self.rupids, self.seeds = create_db_ruptures(
                self.rupture, self.ses_coll, self.tags,
                self.oqparam.random_seed)

        correlation_model = models.get_correl_model(
            models.OqJob.objects.get(pk=self.job.id))
        gsim = valid.gsim(oqparam.gsim)
        self.computer = GmfComputer(
            self.rupture, self.sites, oqparam.imtls, [gsim],
            trunc_level, correlation_model)
Example #6
    def create_ruptures(self):
        oqparam = models.oqparam(self.job.id)
        self.imts = map(from_string, oqparam.imtls)
        self.rupture = readinput.get_rupture(oqparam)

        # check filtering
        trunc_level = oqparam.truncation_level
        maximum_distance = oqparam.maximum_distance
        self.sites = filters.filter_sites_by_distance_to_rupture(
            self.rupture, maximum_distance, self.site_collection)
        if self.sites is None:
            raise RuntimeError('All sites were filtered out! '
                               'maximum_distance=%s km' % maximum_distance)

        # create ses output
        output = models.Output.objects.create(oq_job=self.job,
                                              display_name='SES Collection',
                                              output_type='ses')
        self.ses_coll = models.SESCollection.create(output=output)

        # create gmf output
        output = models.Output.objects.create(oq_job=self.job,
                                              display_name="GMF",
                                              output_type="gmf_scenario")
        self.gmf = models.Gmf.objects.create(output=output)

        with self.monitor('saving ruptures', autoflush=True):
            self.tags = [
                'scenario-%010d' % i
                for i in xrange(oqparam.number_of_ground_motion_fields)
            ]
            _, self.rupids, self.seeds = create_db_ruptures(
                self.rupture, self.ses_coll, self.tags,
                self.oqparam.random_seed)

        correlation_model = models.get_correl_model(
            models.OqJob.objects.get(pk=self.job.id))
        gsim = valid.gsim(oqparam.gsim)
        self.computer = GmfComputer(self.rupture, self.sites, oqparam.imtls,
                                    [gsim], trunc_level, correlation_model)
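
All three create_ruptures variants parse IMT strings with from_string, and Example #3 unpacks the result as a (name, period, damping) triple. A toy parser reproducing that shape (an assumption for illustration, not hazardlib's implementation):

import re

def from_string(imt_str, default_damping=5.0):
    """Toy stand-in: parse 'PGA' or 'SA(0.1)' into (name, period, damping)."""
    match = re.match(r'^(PGA|PGV|SA)(?:\((\d*\.?\d+)\))?$', imt_str)
    if not match:
        raise ValueError('invalid IMT: %s' % imt_str)
    name, period = match.group(1), match.group(2)
    if name == 'SA':
        return (name, float(period), default_damping)
    return (name, None, None)

print(from_string('SA(0.1)'))  # ('SA', 0.1, 5.0)
print(from_string('PGA'))      # ('PGA', None, None)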
Example #7
def extract(hc_id, a_writer):
    hc = models.oqparam(hc_id)

    for lt in models.LtRealization.objects.filter(
            lt_model__hazard_calculation=hc.oqjob):

        for imt in hc.intensity_measure_types:
            imt_type, sa_period, _ = from_string(imt)

            if imt_type == "PGA":
                imt_type_fix = "SA"
                sa_period_fix = 0
            else:
                imt_type_fix = imt_type
                sa_period_fix = sa_period

            ruptures = sorted(
                [r.id for r in models.SESRupture.objects.filter(
                    rupture__ses_collection__lt_realization=lt)])

            for site in hc.hazardsite_set.all().order_by('id'):
                gmvs = []
                gmvs_data = dict()

                for ses_coll in models.SESCollection.objects.filter(
                        lt_realization=lt).order_by('id'):
                    for ses in ses_coll:
                        for gmf in models.GmfData.objects.filter(
                                ses_id=ses.ordinal,
                                site=site,
                                imt=imt_type, sa_period=sa_period):
                            gmvs_data.update(
                                dict(zip(gmf.rupture_ids, gmf.gmvs)))
                gmvs.extend([gmvs_data.get(r, 0.0) for r in ruptures])
                a_writer.writerow([lt.id, site.location.x, site.location.y,
                                   imt_type_fix, sa_period_fix] + gmvs)
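
extract aligns the sparse per-rupture GMVs against the full sorted rupture list, padding ruptures without a value with 0.0. The alignment step on its own (hypothetical IDs):

ruptures = sorted([101, 102, 103, 104])
gmvs_data = {102: 0.25, 104: 0.4}  # only the ruptures that produced a GMV
gmvs = [gmvs_data.get(r, 0.0) for r in ruptures]
print(gmvs)  # [0.0, 0.25, 0.0, 0.4]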
Example #8
def compute_disagg(sitecol, sources, trt_model_id,
                   trt_num, curves_dict, bin_edges, monitor):
    # see https://bugs.launchpad.net/oq-engine/+bug/1279247 for an explanation
    # of the algorithm used
    """
    :param sitecol:
        a :class:`openquake.hazardlib.site.SiteCollection` instance
    :param list sources:
        list of hazardlib source objects
    :param trt_model_id:
        the ID of a :class:`openquake.engine.db.models.TrtModel` instance
    :param dict trt_num:
        a dictionary Tectonic Region Type -> incremental number
    :param curves_dict:
        a dictionary with the hazard curves for sites, realizations and IMTs
    :param bin_edges:
        a dictionary (lt_model_id, site_id) -> edges
    :param monitor:
        monitor of the currently running job
    :returns:
        a dictionary of probability arrays, with composite key
        (site.id, rlz.id, poe, imt, iml, trt_names).
    """
    hc = models.oqparam(monitor.job_id)
    trt_model = models.TrtModel.objects.get(pk=trt_model_id)
    gsims = trt_model.get_gsim_instances()
    lt_model_id = trt_model.lt_model.id
    rlzs = trt_model.get_rlzs_by_gsim()
    trt_names = tuple(trt_model.lt_model.get_tectonic_region_types())
    result = {}  # site.id, rlz.id, poe, imt, iml, trt_names -> array

    collecting_mon = monitor('collecting bins')
    arranging_mon = monitor('arranging bins')

    for site in sitecol:
        # edges as wanted by disagg._arrange_data_in_bins
        try:
            edges = bin_edges[lt_model_id, site.id]
        except KeyError:
            # bin_edges for a given site are missing if the site is far away
            continue

        # generate source, rupture, sites once per site
        source_ruptures = list(
            gen_ruptures_for_site(site, sources, hc.maximum_distance, monitor))
        if not source_ruptures:
            continue
        logs.LOG.info('Collecting bins from %d ruptures close to %s',
                      sum(len(rupts) for src, rupts in source_ruptures),
                      site.location)

        with collecting_mon:
            bdata = _collect_bins_data(
                trt_num, source_ruptures, site, curves_dict[site.id],
                trt_model_id, gsims, hc.imtls,
                hc.poes_disagg, hc.truncation_level,
                hc.num_epsilon_bins, monitor)

        if not bdata.pnes:  # no contributions for this site
            continue

        for poe in hc.poes_disagg:
            for imt in hc.imtls:
                for gsim in gsims:
                    for rlz in rlzs[gsim.__class__.__name__]:
                        # extract the probabilities of non-exceedance for the
                        # given realization, disaggregation PoE, and IMT
                        iml_pne_pairs = [pne[rlz.id, poe, imt]
                                         for pne in bdata.pnes]
                        iml = iml_pne_pairs[0][0]
                        probs = numpy.array(
                            [p for (i, p) in iml_pne_pairs], float)
                        # bins in a format handy for hazardlib
                        bins = [bdata.mags, bdata.dists,
                                bdata.lons, bdata.lats,
                                bdata.trts, None, probs]

                        # call disagg._arrange_data_in_bins
                        with arranging_mon:
                            key = (site.id, rlz.id, poe, imt, iml, trt_names)
                            matrix = disagg._arrange_data_in_bins(
                                bins, edges + (trt_names,))
                            result[key] = numpy.array(
                                [fn(matrix) for fn in disagg.pmf_map.values()])
    collecting_mon.flush()
    arranging_mon.flush()
    return result
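
collecting_mon and arranging_mon are timing contexts that are entered many times and flushed once at the end. A minimal sketch of that pattern (a toy, not the engine's monitor API):

import time

class TimingMonitor(object):
    """Toy timing context accumulating elapsed time across `with` blocks."""
    def __init__(self, operation):
        self.operation = operation
        self.duration = 0.0
    def __enter__(self):
        self._t0 = time.time()
        return self
    def __exit__(self, etype, exc, tb):
        self.duration += time.time() - self._t0
    def flush(self):
        print('%s: %.3fs' % (self.operation, self.duration))

collecting_mon = TimingMonitor('collecting bins')
for _ in range(3):
    with collecting_mon:
        time.sleep(0.01)
collecting_mon.flush()  # total time across all three blocks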
Example #9
def compute_ruptures(sources, sitecol, info, monitor):
    """
    Celery task for the stochastic event set calculator.

    Samples logic trees and calls the stochastic event set calculator.

    Once stochastic event sets are calculated, results will be saved to the
    database. See :class:`openquake.engine.db.models.SESCollection`.

    Optionally (specified in the job configuration using the
    `ground_motion_fields` parameter), GMFs can be computed from each rupture
    in each stochastic event set. GMFs are also saved to the database.

    :param sources:
        List of commonlib.source.Source tuples
    :param sitecol:
        a :class:`openquake.hazardlib.site.SiteCollection` instance
    :param info:
        a :class:`openquake.commonlib.source.CompositionInfo` instance
    :param monitor:
        monitor of the currently running job.
    :returns:
        a dictionary trt_model_id -> tot_ruptures
    """
    # NB: all realizations in gsims correspond to the same source model
    trt_model_id = sources[0].trt_model_id
    trt_model = models.TrtModel.objects.get(pk=trt_model_id)
    ses_coll = {sc.ordinal: sc for sc in models.SESCollection.objects.filter(
        trt_model=trt_model)}

    hc = models.oqparam(monitor.job_id)
    tot_ruptures = 0

    filter_sites_mon = monitor('filtering sites', measuremem=False)
    generate_ruptures_mon = monitor('generating ruptures', measuremem=False)
    filter_ruptures_mon = monitor('filtering ruptures', measuremem=False)
    save_ruptures_mon = monitor('saving ruptures', measuremem=False)

    # Compute and save stochastic event sets
    for src in sources:
        t0 = time.time()

        with filter_sites_mon:  # filtering sources
            s_sites = src.filter_sites_by_distance_to_source(
                hc.maximum_distance, sitecol)
            if s_sites is None:
                continue

        with generate_ruptures_mon:
            num_occ_by_rup = sample_ruptures(
                src, hc.ses_per_logic_tree_path, info)

        with filter_ruptures_mon:
            pairs = list(
                build_ses_ruptures(
                    src, num_occ_by_rup, s_sites, hc.maximum_distance, sitecol
                ))
        # saving ses_ruptures
        with save_ruptures_mon:
            for rup, rups in pairs:
                for col_idx in set(r.col_idx for r in rups):
                    prob_rup = models.ProbabilisticRupture.create(
                        rup, ses_coll[col_idx], rups[0].indices)
                    for r in rups:
                        if r.col_idx == col_idx:
                            models.SESRupture.objects.create(
                                rupture=prob_rup, ses_id=r.ses_idx,
                                tag=r.tag, seed=r.seed)

        if num_occ_by_rup:
            num_ruptures = len(num_occ_by_rup)
            occ_ruptures = sum(num for rup in num_occ_by_rup
                               for num in num_occ_by_rup[rup].itervalues())
            tot_ruptures += occ_ruptures
        else:
            num_ruptures = 0
            occ_ruptures = 0

        # save SourceInfo
        source_inserter.add(
            models.SourceInfo(trt_model_id=trt_model_id,
                              source_id=src.source_id,
                              source_class=src.__class__.__name__,
                              num_sites=len(s_sites),
                              num_ruptures=num_ruptures,
                              occ_ruptures=occ_ruptures,
                              uniq_ruptures=num_ruptures,
                              calc_time=time.time() - t0))

    filter_sites_mon.flush()
    generate_ruptures_mon.flush()
    filter_ruptures_mon.flush()
    save_ruptures_mon.flush()
    source_inserter.flush()

    return {trt_model_id: tot_ruptures}
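
num_occ_by_rup maps each rupture to its per-SES occurrence counts; the totals above reduce it as follows (hypothetical data):

num_occ_by_rup = {
    'rup_a': {1: 2, 3: 1},  # rupture -> {ses_idx: num_occurrences}
    'rup_b': {2: 1},
}
num_ruptures = len(num_occ_by_rup)  # distinct ruptures that occurred
occ_ruptures = sum(num for rup in num_occ_by_rup
                   for num in num_occ_by_rup[rup].values())
print('%d %d' % (num_ruptures, occ_ruptures))  # 2 4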
Example #10
def compute_disagg(sitecol, sources, trt_model_id, trt_num, curves_dict,
                   bin_edges, monitor):
    # see https://bugs.launchpad.net/oq-engine/+bug/1279247 for an explanation
    # of the algorithm used
    """
    :param sitecol:
        a :class:`openquake.hazardlib.site.SiteCollection` instance
    :param list sources:
        list of hazardlib source objects
    :param trt_model_id:
        the ID of a :class:`openquake.engine.db.models.TrtModel` instance
    :param dict trt_num:
        a dictionary Tectonic Region Type -> incremental number
    :param curves_dict:
        a dictionary with the hazard curves for sites, realizations and IMTs
    :param bin_edges:
        a dictionary (lt_model_id, site_id) -> edges
    :param monitor:
        monitor of the currently running job
    :returns:
        a dictionary of probability arrays, with composite key
        (site.id, rlz.id, poe, imt, iml, trt_names).
    """
    hc = models.oqparam(monitor.job_id)
    trt_model = models.TrtModel.objects.get(pk=trt_model_id)
    gsims = trt_model.get_gsim_instances()
    lt_model_id = trt_model.lt_model.id
    rlzs = trt_model.get_rlzs_by_gsim()
    trt_names = tuple(trt_model.lt_model.get_tectonic_region_types())
    result = {}  # site.id, rlz.id, poe, imt, iml, trt_names -> array

    collecting_mon = monitor('collecting bins')
    arranging_mon = monitor('arranging bins')

    for site in sitecol:
        # edges as wanted by disagg._arrange_data_in_bins
        try:
            edges = bin_edges[lt_model_id, site.id]
        except KeyError:
            # bin_edges for a given site are missing if the site is far away
            continue

        # generate source, rupture, sites once per site
        source_ruptures = list(
            gen_ruptures_for_site(site, sources, hc.maximum_distance, monitor))
        if not source_ruptures:
            continue
        logs.LOG.info('Collecting bins from %d ruptures close to %s',
                      sum(len(rupts) for src, rupts in source_ruptures),
                      site.location)

        with collecting_mon:
            bdata = _collect_bins_data(trt_num, source_ruptures, site,
                                       curves_dict[site.id], trt_model_id,
                                       gsims, hc.imtls, hc.poes_disagg,
                                       hc.truncation_level,
                                       hc.num_epsilon_bins, monitor)

        if not bdata.pnes:  # no contributions for this site
            continue

        for poe in hc.poes_disagg:
            for imt in hc.imtls:
                for gsim in gsims:
                    for rlz in rlzs[gsim.__class__.__name__]:
                        # extract the probabilities of non-exceedance for the
                        # given realization, disaggregation PoE, and IMT
                        iml_pne_pairs = [
                            pne[rlz.id, poe, imt] for pne in bdata.pnes
                        ]
                        iml = iml_pne_pairs[0][0]
                        probs = numpy.array([p for (i, p) in iml_pne_pairs],
                                            float)
                        # bins in a format handy for hazardlib
                        bins = [
                            bdata.mags, bdata.dists, bdata.lons, bdata.lats,
                            bdata.trts, None, probs
                        ]

                        # call disagg._arrange_data_in_bins
                        with arranging_mon:
                            key = (site.id, rlz.id, poe, imt, iml, trt_names)
                            matrix = disagg._arrange_data_in_bins(
                                bins, edges + (trt_names, ))
                            result[key] = numpy.array(
                                [fn(matrix) for fn in disagg.pmf_map.values()])
    collecting_mon.flush()
    arranging_mon.flush()
    return result
Example #11
def compute_hazard_curves(sources, sitecol, gsims_by_trt_id, monitor):
    """
    This task computes R2 * I hazard curves (each one is a
    numpy array of S * L floats) from the given block of sources.

    :param sources:
        a block of source objects
    :param sitecol:
        a :class:`openquake.hazardlib.site.SiteCollection` instance
    :param gsims_by_trt_id:
        a dictionary trt_id -> gsim instances
    :param monitor:
        monitor of the currently running job
    :returns:
        a dictionary trt_model_id -> (curves_by_gsim, bounding_boxes)
    """
    hc = models.oqparam(monitor.job_id)
    trt_model_id = sources[0].trt_model_id
    total_sites = len(sitecol)
    sitemesh = sitecol.mesh
    sorted_imts = sorted(hc.imtls)
    sorted_imls = [hc.imtls[imt]
                   for imt in sorted_imts]
    sorted_imts = map(from_string, sorted_imts)

    gsims = gsims_by_trt_id[trt_model_id]
    curves = [[numpy.ones([total_sites, len(ls)]) for ls in sorted_imls]
              for gsim in gsims]
    bbs = []
    mon = monitor('getting ruptures', measuremem=False)
    make_ctxt_mon = monitor('making contexts', measuremem=False)
    calc_poes_mon = monitor('computing poes', measuremem=False)

    num_sites = 0
    # NB: rows are namedtuples with fields (source, rupture, rupture_sites)
    for source, rows in itertools.groupby(
            gen_ruptures(sources, sitecol, hc.maximum_distance, mon),
            key=operator.attrgetter('source')):
        t0 = time.time()
        num_ruptures = 0
        for _source, rupture, r_sites in rows:
            num_sites = max(num_sites, len(r_sites))
            num_ruptures += 1
            if hc.poes_disagg:  # doing disaggregation
                jb_dists = rupture.surface.get_joyner_boore_distance(sitemesh)
                closest_points = rupture.surface.get_closest_points(sitemesh)
                for bb, dist, point in itertools.izip(
                        bbs, jb_dists, closest_points):
                    if dist < hc.maximum_distance:
                        # ruptures too far away are ignored
                        bb.update([dist], [point.longitude], [point.latitude])

            # compute probabilities for all realizations
            for gsim, curv in itertools.izip(gsims, curves):
                for i, pnes in enumerate(_calc_pnes(
                        gsim, r_sites, rupture, sorted_imts, sorted_imls,
                        hc.truncation_level, make_ctxt_mon, calc_poes_mon)):
                    curv[i] *= pnes

        inserter.add(
            models.SourceInfo(trt_model_id=trt_model_id,
                              source_id=source.source_id,
                              source_class=source.__class__.__name__,
                              num_sites=num_sites,
                              num_ruptures=num_ruptures,
                              calc_time=time.time() - t0))

    make_ctxt_mon.flush()
    calc_poes_mon.flush()
    inserter.flush()

    # the 0 here is a shortcut for filtered sources giving no contribution;
    # this is essential for performance, we want to avoid returning
    # big arrays of zeros (MS)
    curves_by_gsim = [
        (gsim.__class__.__name__,
         [0 if general.all_equal(c, 1) else 1. - c for c in curv])
        for gsim, curv in zip(gsims, curves)]
    return {trt_model_id: (curves_by_gsim, bbs)}
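
The curves start as arrays of ones and are multiplied by per-rupture probabilities of non-exceedance; the final probabilities of exceedance are 1 - product, and untouched arrays collapse to the scalar 0 as noted in the comment above. A numpy sketch of that reduction (general.all_equal is replaced by a plain numpy test):

import numpy

total_sites, num_levels = 2, 3
curve = numpy.ones([total_sites, num_levels])
for pnes in ([[0.9, 0.95, 0.99], [1.0, 1.0, 1.0]],
             [[0.8, 0.9, 0.95], [1.0, 1.0, 1.0]]):
    curve *= numpy.array(pnes)  # accumulate non-exceedance per rupture

def finalize(c):
    # 0 is the cheap placeholder for sources giving no contribution
    return 0 if (c == 1).all() else 1. - c

print(finalize(curve))                                  # poes per site/level
print(finalize(numpy.ones([total_sites, num_levels])))  # 0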
Example #12
def compute_ruptures(job_id, sources, sitecol):
    """
    Celery task for the stochastic event set calculator.

    Samples logic trees and calls the stochastic event set calculator.

    Once stochastic event sets are calculated, results will be saved to the
    database. See :class:`openquake.engine.db.models.SESCollection`.

    Optionally (specified in the job configuration using the
    `ground_motion_fields` parameter), GMFs can be computed from each rupture
    in each stochastic event set. GMFs are also saved to the database.

    :param int job_id:
        ID of the currently running job.
    :param sources:
        List of commonlib.source.Source tuples
    :param sitecol:
        a :class:`openquake.hazardlib.site.SiteCollection` instance
    :returns:
        a dictionary trt_model_id -> tot_ruptures
    """
    # NB: all realizations in gsims correspond to the same source model
    trt_model_id = sources[0].trt_model_id
    trt_model = models.TrtModel.objects.get(pk=trt_model_id)
    ses_coll = models.SESCollection.objects.get(trt_model=trt_model)

    hc = models.oqparam(job_id)
    all_ses = range(1, hc.ses_per_logic_tree_path + 1)
    tot_ruptures = 0

    filter_sites_mon = LightMonitor(
        'filtering sites', job_id, compute_ruptures)
    generate_ruptures_mon = LightMonitor(
        'generating ruptures', job_id, compute_ruptures)
    filter_ruptures_mon = LightMonitor(
        'filtering ruptures', job_id, compute_ruptures)
    save_ruptures_mon = LightMonitor(
        'saving ruptures', job_id, compute_ruptures)

    # Compute and save stochastic event sets
    rnd = random.Random()
    for src in sources:
        t0 = time.time()
        rnd.seed(src.seed)

        with filter_sites_mon:  # filtering sources
            s_sites = src.filter_sites_by_distance_to_source(
                hc.maximum_distance, sitecol)
            if s_sites is None:
                continue

        # the dictionary `ses_num_occ` contains [(ses, num_occurrences)]
        # for each occurring rupture for each ses in the ses collection
        ses_num_occ = collections.defaultdict(list)
        with generate_ruptures_mon:  # generating ruptures for the given source
            for rup_no, rup in enumerate(src.iter_ruptures(), 1):
                rup.rup_no = rup_no
                for ses_idx in all_ses:
                    numpy.random.seed(rnd.randint(0, models.MAX_SINT_32))
                    num_occurrences = rup.sample_number_of_occurrences()
                    if num_occurrences:
                        ses_num_occ[rup].append((ses_idx, num_occurrences))

        # NB: the number of occurrences is very low, << 1, so it is
        # more efficient to filter only the ruptures that occur, i.e.
        # to call sample_number_of_occurrences() *before* the filtering
        for rup in sorted(ses_num_occ, key=operator.attrgetter('rup_no')):
            with filter_ruptures_mon:  # filtering ruptures
                r_sites = filters.filter_sites_by_distance_to_rupture(
                    rup, hc.maximum_distance, s_sites
                    ) if hc.maximum_distance else s_sites
                if r_sites is None:
                    # ignore ruptures which are far away
                    del ses_num_occ[rup]  # save memory
                    continue

            # saving ses_ruptures
            with save_ruptures_mon:
                # using a django transaction make the saving faster
                with transaction.commit_on_success(using='job_init'):
                    indices = r_sites.indices if len(r_sites) < len(sitecol) \
                        else None  # None means that nothing was filtered
                    prob_rup = models.ProbabilisticRupture.create(
                        rup, ses_coll, indices)
                    for ses_idx, num_occurrences in ses_num_occ[rup]:
                        for occ_no in range(1, num_occurrences + 1):
                            rup_seed = rnd.randint(0, models.MAX_SINT_32)
                            models.SESRupture.create(
                                prob_rup, ses_idx, src.source_id,
                                rup.rup_no, occ_no, rup_seed)

        if ses_num_occ:
            num_ruptures = len(ses_num_occ)
            occ_ruptures = sum(num for rup in ses_num_occ
                               for ses, num in ses_num_occ[rup])
            tot_ruptures += occ_ruptures
        else:
            num_ruptures = rup_no
            occ_ruptures = 0

        # save SourceInfo
        source_inserter.add(
            models.SourceInfo(trt_model_id=trt_model_id,
                              source_id=src.source_id,
                              source_class=src.__class__.__name__,
                              num_sites=len(s_sites),
                              num_ruptures=rup_no,
                              occ_ruptures=occ_ruptures,
                              uniq_ruptures=num_ruptures,
                              calc_time=time.time() - t0))

    filter_sites_mon.flush()
    generate_ruptures_mon.flush()
    filter_ruptures_mon.flush()
    save_ruptures_mon.flush()
    source_inserter.flush()

    return {trt_model_id: tot_ruptures}
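
Reproducibility here comes from two-level seeding: a master random.Random is seeded per source and numpy's generator is reseeded before each occurrence sampling. A condensed sketch, with a Poisson draw standing in for rup.sample_number_of_occurrences():

import random
import numpy

MAX_SINT_32 = 2 ** 31 - 1

def sample_occurrences(src_seed, n_ses, rate=0.01):
    rnd = random.Random(src_seed)  # master generator, one per source
    occurrences = []
    for ses_idx in range(1, n_ses + 1):
        numpy.random.seed(rnd.randint(0, MAX_SINT_32))
        occurrences.append(int(numpy.random.poisson(rate)))  # stand-in sampler
    return occurrences

print(sample_occurrences(42, 5) == sample_occurrences(42, 5))  # True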
Example #13
def compute_hazard_curves(sources, sitecol, info, monitor):
    """
    This task computes R2 * I hazard curves (each one is a
    numpy array of S * L floats) from the given block of sources.

    :param sources:
        a block of source objects
    :param sitecol:
        a :class:`openquake.hazardlib.site.SiteCollection` instance
    :param info:
        a :class:`openquake.commonlib.source.CompositionInfo` instance
    :param monitor:
        monitor of the currently running job
    :returns:
        a dictionary trt_model_id -> (curves_by_gsim, bounding_boxes)
    """
    hc = models.oqparam(monitor.job_id)
    trt_model_id = sources[0].trt_model_id
    total_sites = len(sitecol)
    sitemesh = sitecol.mesh
    sorted_imts = sorted(hc.imtls)
    sorted_imls = [hc.imtls[imt] for imt in sorted_imts]
    sorted_imts = map(from_string, sorted_imts)
    trt_model = models.TrtModel.objects.get(pk=trt_model_id)

    gsims = trt_model.get_gsim_instances()
    curves = [[numpy.ones([total_sites, len(ls)]) for ls in sorted_imls]
              for gsim in gsims]
    if hc.poes_disagg:  # doing disaggregation
        lt_model_id = trt_model.lt_model.id
        bbs = [BoundingBox(lt_model_id, site_id) for site_id in sitecol.sids]
    else:
        bbs = []
    mon = monitor('getting ruptures', measuremem=False)
    make_ctxt_mon = monitor('making contexts', measuremem=False)
    calc_poes_mon = monitor('computing poes', measuremem=False)

    num_sites = 0
    # NB: rows are namedtuples with fields (source, rupture, rupture_sites)
    for source, rows in itertools.groupby(gen_ruptures(sources, sitecol,
                                                       hc.maximum_distance,
                                                       mon),
                                          key=operator.attrgetter('source')):
        t0 = time.time()
        num_ruptures = 0
        for _source, rupture, r_sites in rows:
            num_sites = max(num_sites, len(r_sites))
            num_ruptures += 1
            if hc.poes_disagg:  # doing disaggregation
                jb_dists = rupture.surface.get_joyner_boore_distance(sitemesh)
                closest_points = rupture.surface.get_closest_points(sitemesh)
                for bb, dist, point in itertools.izip(bbs, jb_dists,
                                                      closest_points):
                    if dist < hc.maximum_distance:
                        # ruptures too far away are ignored
                        bb.update([dist], [point.longitude], [point.latitude])

            # compute probabilities for all realizations
            for gsim, curv in itertools.izip(gsims, curves):
                for i, pnes in enumerate(
                        _calc_pnes(gsim, r_sites, rupture, sorted_imts,
                                   sorted_imls, hc.truncation_level,
                                   make_ctxt_mon, calc_poes_mon)):
                    curv[i] *= pnes

        inserter.add(
            models.SourceInfo(trt_model_id=trt_model_id,
                              source_id=source.source_id,
                              source_class=source.__class__.__name__,
                              num_sites=num_sites,
                              num_ruptures=num_ruptures,
                              calc_time=time.time() - t0))

    make_ctxt_mon.flush()
    calc_poes_mon.flush()
    inserter.flush()

    # the 0 here is a shortcut for filtered sources giving no contribution;
    # this is essential for performance, we want to avoid returning
    # big arrays of zeros (MS)
    curves_by_gsim = [
        (gsim.__class__.__name__,
         [0 if general.all_equal(c, 1) else 1. - c for c in curv])
        for gsim, curv in zip(gsims, curves)
    ]
    return {trt_model_id: (curves_by_gsim, bbs)}
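
The per-source loop relies on itertools.groupby keyed by operator.attrgetter('source'), which assumes gen_ruptures yields rows already contiguous by source. The grouping pattern in isolation:

import collections
import itertools
import operator

Row = collections.namedtuple('Row', 'source rupture rupture_sites')
rows = [Row('src1', 'rup1', [1, 2]), Row('src1', 'rup2', [1]),
        Row('src2', 'rup3', [2])]  # already contiguous by source

for source, group in itertools.groupby(rows, key=operator.attrgetter('source')):
    print('%s %s' % (source, [r.rupture for r in group]))
# src1 ['rup1', 'rup2']
# src2 ['rup3']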