Example #1
def run_preclassical(csm, oqparam, h5):
    """
    :param csm: a CompositeSourceModel with attribute .srcfilter
    :param oqparam: the parameters in job.ini file
    :param h5: a DataStore instance
    """
    logging.info('Sending %s', csm.sitecol)

    # do nothing for atomic sources except counting the ruptures
    for src in csm.get_sources(atomic=True):
        src.num_ruptures = src.count_ruptures()
        src.nsites = len(csm.sitecol)

    # run preclassical for non-atomic sources
    sources_by_grp = groupby(
        csm.get_sources(atomic=False),
        lambda src: (src.grp_id, msr_name(src)))
    param = dict(maximum_distance=oqparam.maximum_distance,
                 pointsource_distance=oqparam.pointsource_distance,
                 ps_grid_spacing=oqparam.ps_grid_spacing,
                 split_sources=oqparam.split_sources)
    srcfilter = SourceFilter(
        csm.sitecol.reduce(10000) if csm.sitecol else None,
        oqparam.maximum_distance)
    res = parallel.Starmap(
        preclassical,
        ((srcs, srcfilter, param) for srcs in sources_by_grp.values()),
        h5=h5, distribute=None if len(sources_by_grp) > 1 else 'no').reduce()

    if res and res['before'] != res['after']:
        logging.info('Reduced the number of sources from {:_d} -> {:_d}'.
                     format(res['before'], res['after']))

    if res and h5:
        csm.update_source_info(res['calc_times'], nsites=True)

    for grp_id, srcs in res.items():
        # srcs can be empty if the minimum_magnitude filter is on
        if srcs and not isinstance(grp_id, str):
            newsg = SourceGroup(srcs[0].tectonic_region_type)
            newsg.sources = srcs
            csm.src_groups[grp_id] = newsg

    # sanity check
    for sg in csm.src_groups:
        for src in sg:
            assert src.num_ruptures
            assert src.nsites

    # store ps_grid data, if any
    for key, sources in res.items():
        if isinstance(key, str) and key.startswith('ps_grid/'):
            arrays = []
            for ps in sources:
                if hasattr(ps, 'location'):
                    lonlats = [ps.location.x, ps.location.y]
                    for src in getattr(ps, 'pointsources', []):
                        lonlats.extend([src.location.x, src.location.y])
                    arrays.append(F32(lonlats))
            h5[key] = arrays
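
The grouping step above relies on the groupby helper from openquake.baselib.general, which returns a dict mapping each key to the list of matching items. A minimal sketch of that behavior (FakeSrc is a made-up stand-in for a real source object, not part of the original code):

from openquake.baselib.general import groupby

class FakeSrc:
    def __init__(self, grp_id):
        self.grp_id = grp_id

# three sources in two groups
srcs = [FakeSrc(0), FakeSrc(0), FakeSrc(1)]
by_grp = groupby(srcs, lambda src: src.grp_id)
print({grp_id: len(block) for grp_id, block in by_grp.items()})  # {0: 2, 1: 1}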
Example #2
def read_source_groups(fname):
    """
    :param fname: a path to a source model XML file
    :return: a list of SourceGroup objects containing source nodes
    """
    smodel = nrml.read(fname).sourceModel
    src_groups = []
    if smodel[0].tag.endswith('sourceGroup'):  # NRML 0.5 format
        for sg_node in smodel:
            sg = SourceGroup(sg_node['tectonicRegion'])
            sg.sources = sg_node.nodes
            src_groups.append(sg)
    else:  # NRML 0.4 format: smodel is a list of source nodes
        src_groups.extend(SourceGroup.collect(smodel))
    return src_groups
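
A hypothetical usage sketch for read_source_groups (the file name is a placeholder); each returned SourceGroup carries its tectonic region type and the raw source nodes:

src_groups = read_source_groups('source_model.xml')
for sg in src_groups:
    print(sg.trt, len(sg.sources))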
Example #3
 def test_mutually_exclusive_ruptures(self):
     # Test the calculation of hazard curves using mutually exclusive
     # ruptures for a single source
     gsim_by_trt = [SadighEtAl1997()]
     rupture = _create_rupture(10., 6.)
     data = [(rupture, PMF([(0.7, 0), (0.3, 1)])),
             (rupture, PMF([(0.6, 0), (0.4, 1)]))]
     data[0][0].weight = 0.5
     data[1][0].weight = 0.5
     src = NonParametricSeismicSource('0', 'test', "Active Shallow Crust",
                                      data)
     src.id = 0
     src.grp_id = 0
     src.trt_smr = 0
     src.mutex_weight = 1
     group = SourceGroup(src.tectonic_region_type, [src], 'test', 'mutex',
                         'mutex')
     param = dict(imtls=self.imtls,
                  src_interdep=group.src_interdep,
                  rup_interdep=group.rup_interdep,
                  grp_probability=group.grp_probability)
     cmaker = ContextMaker(src.tectonic_region_type, gsim_by_trt, param)
     crv = classical(group, self.sites, cmaker)['pmap'][0]
     npt.assert_almost_equal(numpy.array([0.35000, 0.32497, 0.10398]),
                             crv.array[:, 0],
                             decimal=4)
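
The PMF instances built in this test give the occurrence probabilities of each rupture. A minimal sketch, assuming openquake.hazardlib.pmf.PMF (the probabilities must sum to one):

from openquake.hazardlib.pmf import PMF

pmf = PMF([(0.7, 0), (0.3, 1)])  # 70% chance of 0 occurrences, 30% of 1
print([prob for prob, n_occ in pmf.data])  # [0.7, 0.3]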
Example #4
 def test_mutually_exclusive_ruptures(self):
     # Test the calculation of hazard curves using mutually exclusive
     # ruptures for a single source
     gsim_by_trt = [SadighEtAl1997()]
     rupture = _create_rupture(10., 6.)
     data = [(rupture, PMF([(0.7, 0), (0.3, 1)])),
             (rupture, PMF([(0.6, 0), (0.4, 1)]))]
     print(data[0][0])
     data[0][0].weight = 0.5
     data[1][0].weight = 0.5
     print(data[0][0].weight)
     src = NonParametricSeismicSource('0', 'test', TRT.ACTIVE_SHALLOW_CRUST,
                                      data)
     src.src_group_id = 0
     src.mutex_weight = 1
     group = SourceGroup(src.tectonic_region_type, [src], 'test', 'mutex',
                         'mutex')
     param = dict(imtls=self.imtls,
                  filter_distance='rjb',
                  src_interdep=group.src_interdep,
                  rup_interdep=group.rup_interdep,
                  grp_probability=group.grp_probability)
     crv = classical(group, self.sites, gsim_by_trt, param)['pmap'][0]
     npt.assert_almost_equal(numpy.array([0.35000, 0.32497, 0.10398]),
                             crv[0].array[:, 0],
                             decimal=4)
Example #5
def calc_hazard_curves(
        groups, ss_filter, imtls, gsim_by_trt, truncation_level=None,
        apply=Sequential.apply):
    """
    Compute hazard curves on a list of sites, given a set of seismic source
    groups and a dictionary of ground shaking intensity models (one per
    tectonic region type).

    Probability of ground motion exceedance is computed in different ways
    depending on whether the sources are independent or mutually exclusive.

    :param groups:
        A sequence of groups of seismic source objects (instances
        of :class:`~openquake.hazardlib.source.base.BaseSeismicSource`).
    :param ss_filter:
        A source filter over the site collection or the site collection itself
    :param imtls:
        Dictionary mapping intensity measure type strings
        to lists of intensity measure levels.
    :param gsim_by_trt:
        Dictionary mapping tectonic region types (members
        of :class:`openquake.hazardlib.const.TRT`) to
        :class:`~openquake.hazardlib.gsim.base.GMPE` or
        :class:`~openquake.hazardlib.gsim.base.IPE` objects.
    :param truncation_level:
        Float, number of standard deviations for truncation of the intensity
        distribution.
    :param apply:
        The apply function to use (default Sequential.apply)
    :returns:
        An array of size N, where N is the number of sites, whose elements
        are records with fields given by the intensity measure types; the
        size of each field is given by the number of levels in ``imtls``.
    """
    # This is ensuring backward compatibility i.e. processing a list of
    # sources
    if not isinstance(groups[0], SourceGroup):  # sent a list of sources
        dic = groupby(groups, operator.attrgetter('tectonic_region_type'))
        groups = [SourceGroup(trt, dic[trt], 'src_group', 'indep', 'indep')
                  for trt in dic]
    if hasattr(ss_filter, 'sitecol'):  # a filter, as it should be
        sitecol = ss_filter.sitecol
    else:  # backward compatibility, a site collection was passed
        sitecol = ss_filter
        ss_filter = SourceFilter(sitecol, {})

    imtls = DictArray(imtls)
    pmap = ProbabilityMap(len(imtls.array), 1)
    # Processing groups with homogeneous tectonic region
    for group in groups:
        if group.src_interdep == 'mutex':  # do not split the group
            pmap |= pmap_from_grp(
                group, ss_filter, imtls, gsim_by_trt, truncation_level)
        else:  # split the group and apply `pmap_from_grp` in parallel
            pmap |= apply(
                pmap_from_grp,
                (group, ss_filter, imtls, gsim_by_trt, truncation_level),
                weight=operator.attrgetter('weight')).reduce(operator.or_)
    return pmap.convert(imtls, len(sitecol.complete))
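
A sketch of the backward-compatibility branch above, which regroups a flat list of sources by tectonic region type (FakeSource is a made-up stand-in, not part of the original code):

import operator
from openquake.baselib.general import groupby

class FakeSource:
    def __init__(self, trt):
        self.tectonic_region_type = trt

sources = [FakeSource('Active Shallow Crust'),
           FakeSource('Stable Shallow Crust'),
           FakeSource('Active Shallow Crust')]
dic = groupby(sources, operator.attrgetter('tectonic_region_type'))
print({trt: len(srcs) for trt, srcs in dic.items()})
# {'Active Shallow Crust': 2, 'Stable Shallow Crust': 1}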
Example #6
def pmap_from_grp(sources,
                  source_site_filter,
                  imtls,
                  gsims,
                  truncation_level=None,
                  bbs=(),
                  monitor=Monitor()):
    """
    Compute the hazard curves for a set of sources belonging to the same
    tectonic region type for all the GSIMs associated to that TRT.
    The arguments are the same as in :func:`calc_hazard_curves`, except
    for ``gsims``, which is a list of GSIM instances.

    :returns: a ProbabilityMap instance
    """
    if isinstance(sources, SourceGroup):
        group = sources
        sources = group.sources
        trt = sources[0].tectonic_region_type
    else:  # list of sources
        trt = sources[0].tectonic_region_type
        group = SourceGroup(trt, sources, 'src_group', 'indep', 'indep')
    try:
        maxdist = source_site_filter.integration_distance[trt]
    except (KeyError, TypeError):  # scalar integration distance
        maxdist = source_site_filter.integration_distance
    if hasattr(gsims, 'keys'):  # dictionary trt -> gsim
        gsims = [gsims[trt]]
    with GroundShakingIntensityModel.forbid_instantiation():
        imtls = DictArray(imtls)
        cmaker = ContextMaker(gsims, maxdist)
        ctx_mon = monitor('making contexts', measuremem=False)
        pne_mon = monitor('computing poes', measuremem=False)
        disagg_mon = monitor('get closest points', measuremem=False)
        src_indep = group.src_interdep == 'indep'
        pmap = ProbabilityMap(len(imtls.array), len(gsims))
        pmap.calc_times = []  # triples (src_id, nsites, dt)
        pmap.grp_id = sources[0].src_group_id
        for src, s_sites in source_site_filter(sources):
            t0 = time.time()
            poemap = poe_map(src, s_sites, imtls, cmaker, truncation_level,
                             bbs, group.rup_interdep == 'indep', ctx_mon,
                             pne_mon, disagg_mon)
            if src_indep:  # usual composition of probabilities
                pmap |= poemap
            else:  # mutually exclusive probabilities
                weight = float(group.srcs_weights[src.source_id])
                for sid in poemap:
                    pmap[sid] += poemap[sid] * weight
            pmap.calc_times.append(
                (src.source_id, len(s_sites), time.time() - t0))
        # storing the number of contributing ruptures too
        pmap.eff_ruptures = {pmap.grp_id: pne_mon.counts}
        return pmap
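
A numeric sketch of the two composition rules in pmap_from_grp: independent sources compose probabilities of exceedance as 1 - (1 - p1)(1 - p2) (the |= operator on ProbabilityMap), while mutually exclusive sources contribute a weighted sum:

p1, p2 = 0.3, 0.2  # per-source probabilities of exceedance
w1, w2 = 0.5, 0.5  # mutex weights from group.srcs_weights
indep = 1 - (1 - p1) * (1 - p2)  # 0.44
mutex = w1 * p1 + w2 * p2        # 0.25
print(indep, mutex)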
Example #7
 def test_hazard_curve_B(self):
     # Test simple calculation
     group = SourceGroup(
         TRT.ACTIVE_SHALLOW_CRUST, [self.src2], 'test', 'indep', 'indep')
     groups = [group]
     curves = calc_hazard_curves(groups,
                                 self.sites,
                                 self.imtls,
                                 self.gsim_by_trt,
                                 truncation_level=None)
     npt.assert_almost_equal(numpy.array([0.30000, 0.2646, 0.0625]),
                             curves[0][0], decimal=4)
Example #8
def create(label, rupture_hdf5_fname, output_folder, investigation_t,
           trt=TRT.SUBDUCTION_INTRASLAB):
    """
    :param label:
        A string identifying the source
    :param rupture_hdf5_fname:
        Name of the .hdf5 file containing the ruptures
    :param output_folder:
        Folder where to write the .xml files
    :param investigation_t:
        Investigation time in years
    :param trt:
        Tectonic region type label
    """

    # Open the input .hdf5 file with the ruptures
    f = h5py.File(rupture_hdf5_fname, 'r')
    if not os.path.exists(output_folder):
        os.mkdir(output_folder)

    # Create xml
    for mag in f['ruptures'].keys():

        # Check the number of ruptures defined for the current magnitude value
        grp = f['ruptures'][mag]
        if len(grp) < 1:
            tmps = 'Skipping ruptures for magnitude {:.2f}'.format(float(mag))
            logging.warning(tmps)
            continue

        # Set the name of the output nrml file
        fxml = os.path.join(output_folder, '{:s}.xml'.format(mag))

        # Set the source ID
        mags = re.sub('\\.', 'pt', mag)
        sid = 'src_{:s}_{:s}'.format(label, mags)
        name = 'Ruptures for mag bin {:s}'.format(mags)

        # Create a non-parametric seismic source
        src = create_source(grp, float(mag), sid, name, trt)

        # Create source group
        sgrp = SourceGroup(trt, [src])

        # Create source model
        name = 'Source model for {:s} magnitude {:s}'.format(label, mags)
        mdl = SourceModel([sgrp], name, investigation_t)

        # Write source model
        write_source_model(fxml, mdl, mag)

    f.close()
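
A hypothetical invocation of create (the label, file and folder names are placeholders, not from the original code):

create(label='slab01',
       rupture_hdf5_fname='ruptures.hdf5',
       output_folder='out_xml',
       investigation_t=1.0)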
Example #9
 def test_hazard_curve_B(self):
     # independent sources in a group
     group = SourceGroup("Active Shallow Crust", [self.src2], 'test',
                         'indep', 'indep')
     groups = [group]
     curves = calc_hazard_curves(groups,
                                 self.sites,
                                 self.imtls,
                                 self.gsim_by_trt,
                                 truncation_level=None,
                                 investigation_time=1)
     npt.assert_almost_equal(numpy.array([0.30000, 0.2785, 0.0891]),
                             curves[0][0],
                             decimal=4)
Example #10
 def test_mutually_exclusive_ruptures(self):
     # Test the calculation of hazard curves using mutually exclusive
     # ruptures for a single source
     gsim_by_trt = [SadighEtAl1997()]
     rupture = _create_rupture(10., 6.)
     data = [(rupture, PMF([(0.7, 0), (0.3, 1)])),
             (rupture, PMF([(0.6, 0), (0.4, 1)]))]
     src = NonParametricSeismicSource('0', 'test', TRT.ACTIVE_SHALLOW_CRUST,
                                      data)
     group = SourceGroup(
         src.tectonic_region_type, [src], 'test', 'indep', 'mutex')
     param = dict(imtls=self.imtls)
     crv = pmap_from_grp(group, self.sites, gsim_by_trt, param)[0]
     npt.assert_almost_equal(numpy.array([0.35000, 0.32497, 0.10398]),
                             crv.array[:, 0], decimal=4)
Example #11
def create(label, rupture_hdf5_fname, output_folder, investigation_t):
    """
    :param label:
    :param rupture_hdf5_fname:
    :param output_folder:
    """
    #
    #
    f = h5py.File(rupture_hdf5_fname, 'r')
    if not os.path.exists(output_folder):
        os.mkdir(output_folder)
    #
    #
    trt = TRT.SUBDUCTION_INTRASLAB
    for mag in f['ruptures'].keys():
        #
        # check the number of ruptures defined for the current magnitude value
        grp = f['ruptures'][mag]
        if len(grp) < 1:
            tmps = 'Skipping ruptures for magnitude {:.2f}'.format(float(mag))
            print(tmps)
            logging.warning(tmps)
            continue
        #
        # set the name of the output nrml file
        fnrml = os.path.join(output_folder, '{:s}.nrml'.format(mag))
        #
        # source ID
        mags = re.sub('\\.', 'pt', mag)
        sid = 'src_{:s}_{:s}'.format(label, mags)
        name = 'Ruptures for mag bin {:s}'.format(mags)
        #
        # creates a non-parametric seismic source
        src = create_nrml_source(grp, float(mag), sid, name, trt)
        #
        # create source group
        sgrp = SourceGroup(trt, [src])
        #
        # create source model
        name = 'Source model for {:s} magnitude {:s}'.format(label, mags)
        mdl = SourceModel([sgrp], name, investigation_t)
        #
        # write source model
        write_source_model(fnrml, mdl, mag)
    f.close()
    print('Done')
Example #12
def calc_hazard_curves(groups,
                       srcfilter,
                       imtls,
                       gsim_by_trt,
                       truncation_level=None,
                       apply=sequential_apply,
                       reqv=None,
                       **kwargs):
    """
    Compute hazard curves on a list of sites, given a set of seismic source
    groups and a dictionary of ground shaking intensity models (one per
    tectonic region type).

    Probability of ground motion exceedance is computed in different ways
    depending on whether the sources are independent or mutually exclusive.

    :param groups:
        A sequence of groups of seismic source objects (instances
        of :class:`~openquake.hazardlib.source.base.BaseSeismicSource`).
    :param srcfilter:
        A source filter over the site collection or the site collection itself
    :param imtls:
        Dictionary mapping intensity measure type strings
        to lists of intensity measure levels.
    :param gsim_by_trt:
        Dictionary mapping tectonic region types (members
        of :class:`openquake.hazardlib.const.TRT`) to
        :class:`~openquake.hazardlib.gsim.base.GMPE` or
        :class:`~openquake.hazardlib.gsim.base.IPE` objects.
    :param truncation_level:
        Float, number of standard deviations for truncation of the intensity
        distribution.
    :param apply:
        apply function to use (default sequential_apply)
    :param reqv:
        If not None, an instance of RjbEquivalent
    :returns:
        An array of size N, where N is the number of sites, whose elements
        are records with fields given by the intensity measure types; the
        size of each field is given by the number of levels in ``imtls``.
    """
    # This is ensuring backward compatibility i.e. processing a list of
    # sources
    if not isinstance(groups[0], SourceGroup):  # sent a list of sources
        odic = groupby(groups, operator.attrgetter('tectonic_region_type'))
        groups = [
            SourceGroup(trt, odic[trt], 'src_group', 'indep', 'indep')
            for trt in odic
        ]
    idx = 0
    span = None
    for i, grp in enumerate(groups):
        for src in grp:
            tom = getattr(src, 'temporal_occurrence_model', None)
            if tom:
                span = tom.time_span
            src.weight = src.count_ruptures()
            src.grp_id = i
            src.id = idx
            idx += 1
    imtls = DictArray(imtls)
    shift_hypo = kwargs.get('shift_hypo', False)
    param = dict(imtls=imtls,
                 truncation_level=truncation_level,
                 reqv=reqv,
                 cluster=grp.cluster,
                 shift_hypo=shift_hypo,
                 investigation_time=kwargs.get('investigation_time', span))
    pmap = ProbabilityMap(imtls.size, 1)
    # Processing groups with homogeneous tectonic region
    mon = Monitor()
    sitecol = getattr(srcfilter, 'sitecol', srcfilter)
    for group in groups:
        trt = group.trt
        if sitecol is not srcfilter:
            param['maximum_distance'] = srcfilter.integration_distance(trt)
        cmaker = ContextMaker(trt, [gsim_by_trt[trt]], param, mon)
        if group.atomic:  # do not split
            it = [classical(group, sitecol, cmaker)]
        else:  # split the group and apply `classical` in parallel
            it = apply(classical, (group.sources, sitecol, cmaker),
                       weight=operator.attrgetter('weight'))
        for dic in it:
            pmap |= dic['pmap']
    return pmap.convert(imtls, len(sitecol.complete))
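
The imtls dictionary is wrapped in a DictArray, which flattens all the levels into a single .array while keeping dict-style access per IMT. A small sketch, assuming openquake.baselib.general.DictArray:

from openquake.baselib.general import DictArray

imtls = DictArray({'PGA': [0.1, 0.2, 0.4], 'SA(1.0)': [0.05, 0.1]})
print(imtls.size)    # 5 levels in total
print(imtls['PGA'])  # the three PGA levels as an array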
Example #13
def pt2fault_distance(pt_sources,
                      fault_sources,
                      min_distance=5.0,
                      filename='source_model.xml',
                      buffer_distance=100.,
                      nrml_version='04',
                      name=None):
    """Calculate distances from a pt source rupture plane
    to the fault sources to then reduce Mmax on events that are 
    within a certain distance
    :param pt_sources:
        list of PointSource objects
    :param fault_sources:
        List of FaultSource objects
    :param min_distance:
        Minimum distance (km) within which we want a point source 
        rupture to be from a fault.
    :param filename:
        Name of output nrml file for revised pt source model
    :param buffer_distance:
        Km, initial filter to only process pts within this
        distance from the fault
    :param nrml_version:
        NRML version for the output file ('04' or '05')
    :param name:
        Name of the output source model
    """

    if name is None:
        name = filename[:-4] + '_geom_filtered'
    id_index = 0  # We need to re-number all sources to avoid duplicate ids
    # Extract the points of the fault source mesh
    fault_lons = []
    fault_lats = []
    fault_depths = []
    for fault in fault_sources:
        whole_fault_surface = SimpleFaultSurface.from_fault_data(
            fault.fault_trace, fault.upper_seismogenic_depth,
            fault.lower_seismogenic_depth, fault.dip,
            fault.rupture_mesh_spacing)
        fault_lons.append(whole_fault_surface.mesh.lons.flatten())
        fault_lats.append(whole_fault_surface.mesh.lats.flatten())
        fault_depths.append(whole_fault_surface.mesh.depths.flatten())
    fault_lons = np.concatenate(fault_lons)
    fault_lats = np.concatenate(fault_lats)
    fault_depths = np.concatenate(fault_depths)
    min_fault_lon = np.min(fault_lons)
    max_fault_lon = np.max(fault_lons)
    min_fault_lat = np.min(fault_lats)
    max_fault_lat = np.max(fault_lats)

    # Generate ruptures for point sources
    minimum_distance_list = []
    revised_point_sources = {
        'Cratonic': [],
        'Non_cratonic': [],
        'Extended': [],
        'Banda': []
    }
    for pt in pt_sources:
        print('Looping over point sources')
        # For speeding things up filter based on initial distances
        # to find points very far from or very close to a fault
        mfd_type = type(pt.mfd).__name__
        pt_depths = []
        for probs, depths in pt.hypocenter_distribution.data:
            pt_depths.append(depths)
        np_probs = []
        np_list = []
        for prob, nodal_plane in pt.nodal_plane_distribution.data:
            np_probs.append(prob)
            np_list.append(nodal_plane)
        centroid_distances = []
        for pt_depth in pt_depths:
            centroid_distances.append(
                distance(pt.location.longitude, pt.location.latitude, pt_depth,
                         fault_lons, fault_lats, fault_depths))
        centroid_distances = np.array(centroid_distances).flatten()
        #      print 'Minimum distance', min(centroid_distances)
        #      print 'Maximum distance', max(centroid_distances)
        if (min(centroid_distances)) > buffer_distance:
            # Keep point as is, not within buffer distance of any faults
            revised_point_sources[pt.tectonic_region_type].append(pt)
            continue
        if (min(centroid_distances)) < min_distance:
            # Discard point sources as too close to a fault
            print('Discarding point source, too close to a fault')
            continue
        rupture_mags = []
        rupture_lons = []
        rupture_lats = []
        rupture_depths = []
        rupture_strikes = []
        rupture_dips = []
        ruptures = pt.iter_ruptures()
        for rupture in ruptures:
            rupture_mags.append(rupture.mag)
            rupture_lons.append(rupture.surface.corner_lons)
            rupture_lats.append(rupture.surface.corner_lats)
            rupture_depths.append(rupture.surface.corner_depths)
            rupture_strikes.append(rupture.surface.strike)
            rupture_dips.append(rupture.surface.dip)
        rupture_mags = np.array(rupture_mags).flatten()
        # make the same length as the corners
        rupture_mags = np.repeat(rupture_mags, 4)
        rupture_strikes = np.repeat(rupture_strikes, 4)
        rupture_dips = np.repeat(rupture_dips, 4)
        rupture_lons = np.array(rupture_lons).flatten()
        rupture_lats = np.array(rupture_lats).flatten()
        rupture_depths = np.array(rupture_depths).flatten()
        print('Doing meshgrid')
        lons1, lons2 = np.meshgrid(fault_lons, rupture_lons)
        lats1, lats2 = np.meshgrid(fault_lats, rupture_lats)
        depths1, depths2 = np.meshgrid(fault_depths, rupture_depths)

        # Calculate distance from pt to all fault
        print('Distance calculations')
        distances = distance(lons1, lats1, depths1, lons2, lats2, depths2)
        closest_distance_to_faults = np.min(distances)
        print('Shortest pt to fault distance is', closest_distance_to_faults)
        minimum_distance_list.append(closest_distance_to_faults)

        # Find where the distance is less than the threshold min_distance
        too_close_lons = lons2[np.where(distances < min_distance)]
        too_close_lats = lats2[np.where(distances < min_distance)]
        if too_close_lons.size > 0:
            lon_indices = np.where(np.in1d(rupture_lons, too_close_lons))[0]
            lat_indices = np.where(np.in1d(rupture_lats, too_close_lats))[0]
            too_close_mags = rupture_mags[np.intersect1d(
                lon_indices, lat_indices)]
            too_close_strikes = rupture_strikes[np.intersect1d(
                lon_indices, lat_indices)]
            too_close_dips = rupture_dips[np.intersect1d(
                lon_indices, lat_indices)]
            #    print 'Magnitudes of rupture close to fault', too_close_mags
            #    print 'Strikes of rupture close to fault', too_close_strikes
            #    print 'Dips of rupture close to fault', too_close_dips
            unique_strikes = np.unique(rupture_strikes)
            unique_dips = np.unique(rupture_dips)
            src_name_index = 0
            for prob, nodal_plane in pt.nodal_plane_distribution.data:
                id_index += 1
                src_name_index += 1
                # We are now splitting the source into many with different
                # combinations of Mmaxs and nodal planes
                new_pt = copy.deepcopy(pt)
                new_pt.source_id = "%i" % id_index
                new_pt.name = new_pt.name + ("_%i" % src_name_index)
                new_np = NodalPlane(nodal_plane.strike, nodal_plane.dip,
                                    nodal_plane.rake)
                # weight of the nodal plane is 1 as we are making
                # a separate source for each nodal plane
                new_np_distribution = PMF([(1.0, new_np)])
                # Calculate new rates based on probability of original nodal plane
                new_pt.nodal_plane_distribution = new_np_distribution
                if mfd_type == 'TruncatedGRMFD':
                    b_val = pt.mfd.b_val
                    # rescale a value in log space
                    a_val = np.log10(np.power(10, pt.mfd.a_val) *
                                     prob)  #*area_src_weight))
                    new_pt.mfd.modify_set_ab(a_val, b_val)
                elif mfd_type == 'EvenlyDiscretizedMFD':
                    mag_bins, rates = zip(
                        *pt.mfd.get_annual_occurrence_rates())
                    mag_bins = np.array(mag_bins)
                    rates = np.array(rates)
                    new_rates = rates * prob  #*area_src_weight)
                    new_pt.mfd.modify_set_mfd(new_pt.mfd.min_mag,
                                              new_pt.mfd.bin_width,
                                              list(new_rates))
                else:
                    msg = 'Weighting method for mfd type %s not yet defined' % mfd_type
                    raise ValueError(msg)
                pair_index = np.where(
                    np.logical_and(too_close_strikes == nodal_plane.strike,
                                   too_close_dips == nodal_plane.dip))
                # Deal with intersecting cases
                if len(pair_index[0]) > 0:
                    intersecting_magnitudes = too_close_mags[pair_index]
                    minimum_magnitude_intersecting_fault = min(
                        intersecting_magnitudes)
                    if minimum_magnitude_intersecting_fault >= \
                            (pt.mfd.min_mag + pt.mfd.bin_width):
                        new_mmax = minimum_magnitude_intersecting_fault - \
                                pt.mfd.bin_width
                        if mfd_type == 'TruncatedGRMFD':
                            new_pt.mfd.max_mag = new_mmax
                        if mfd_type == 'EvenlyDiscretizedMFD':
                            trimmed_rates = new_rates[np.where(
                                mag_bins <= new_mmax)]
                    else:
                        print('Minimum magnitude intersects fault, '
                              'discarding source')
                        continue

                else:
                    pass
                # Append revised source for given nodal plane distribution to
                # list of revised sources
                print('Appending revised source')
                revised_point_sources[pt.tectonic_region_type].append(new_pt)
        else:
            id_index += 1
            pt.source_id = "%i" % id_index
            print('Appending original source')
            revised_point_sources[pt.tectonic_region_type].append(pt)
    if len(minimum_distance_list) > 0:
        print('Overall minimum distance (km):', min(minimum_distance_list))

    # Write pts to source model on their own
    source_model_file = filename
    print('Writing to source model file %s' % source_model_file)
    if nrml_version == '04':
        source_list = []
        for trt, sources in revised_point_sources.items():
            for source in sources:
                source_list.append(source)
        nodes = list(map(obj_to_node, sorted(source_list)))
        source_model = Node("sourceModel", {"name": name}, nodes=nodes)
        with open(source_model_file, 'wb') as f:
            nrml.write([source_model], f, '%s', xmlns=NAMESPACE)
    elif nrml_version == '05':
        source_group_list = []
        id = 0
        for trt, sources in revised_point_sources.items():
            source_group = SourceGroup(trt, sources=sources, id=id)
            id += 1
            source_group_list.append(source_group)
        write_source_model(source_model_file, source_group_list, name=name)
    else:
        print('Warning: nrml version not specified, xml not created')

    # Write pts to source model with faults
    source_model_file = filename[:-4] + '_inc_faults.xml'
    name = name + '_inc_faults'
    write_combined_faults_points(revised_point_sources,
                                 fault_sources,
                                 source_model_file,
                                 name,
                                 nrml_version='04')
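
A minimal numpy sketch of the meshgrid-based all-pairs pattern used above for the rupture-to-fault distances (plain 1-D differences here, just for illustration):

import numpy as np

fault_lons = np.array([0., 1., 2.])
rupture_lons = np.array([10., 11.])
lons1, lons2 = np.meshgrid(fault_lons, rupture_lons)  # shape (2, 3)
print(np.abs(lons1 - lons2).min())  # closest pair: 8.0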
Example #14
def write_combined_faults_points(point_sources,
                                 fault_sources,
                                 filename,
                                 name,
                                 nrml_version='04'):
    """Write pts and fault sources to file
    :param point_sources:
        a list of point sources, or a dict of point sources keyed by trt
    """
    print('Writing to source model file %s' % filename)
    ps_id_index = 1
    fs_id_index = 1
    if nrml_version == '04':
        if type(point_sources) == dict:
            source_list = []
            for trt, sources in point_sources.items():
                for source in sources:
                    source.source_id = 'PS_%i' % ps_id_index
                    source_list.append(source)
                    ps_id_index += 1
                    # id_index = max(id_index, source.source_id)
        elif type(point_sources) == list:
            source_list = point_sources
            for source in source_list:  # renumber in place, do not append
                source.source_id = 'PS_%i' % ps_id_index
                ps_id_index += 1

            # id_index = max(id_index, source.source_id)
        for fault_source in fault_sources:
            #            id_index += 1
            fault_source.source_id = "FS_%i" % fs_id_index
            fs_id_index += 1
            source_list.append(fault_source)
        nodes = list(map(obj_to_node, sorted(source_list)))
        source_model = Node("sourceModel", {"name": name}, nodes=nodes)
        with open(filename, 'wb') as f:
            nrml.write([source_model], f, '%s', xmlns=NAMESPACE)
    elif nrml_version == '05':
        if type(point_sources) == dict:
            source_group_list = []
            id = 0
            id_index = 0
            for trt, sources in point_sources.items():
                for source in sources:
                    id_index = max(id_index, int(source.source_id))
            for trt, sources in point_sources.items():
                for fault_source in fault_sources:
                    if fault_source.tectonic_region_type == trt:
                        id_index += 1
                        fault_source.source_id = "%i" % id_index
                        sources.append(fault_source)
                source_group = SourceGroup(trt, sources=sources, id=id)
                id += 1
                source_group_list.append(source_group)
            write_source_model(filename, source_group_list, name=name)
        elif type(point_sources) == list:
            msg = 'Method not yet implemented for nrml version 0.5'
            raise NotImplementedError(msg)
    else:
        print('Warning: nrml version not specified, xml not created')
Example #15
def run_preclassical(csm, oqparam, h5):
    """
    :param csm: a CompositeSourceModel with attribute .srcfilter
    :param oqparam: the parameters in job.ini file
    :param h5: a DataStore instance
    """
    # do nothing for atomic sources except counting the ruptures
    for src in csm.get_sources(atomic=True):
        src.num_ruptures = src.count_ruptures()
        src.nsites = len(csm.sitecol) if csm.sitecol else 1

    # run preclassical for non-atomic sources
    sources_by_grp = groupby(csm.get_sources(atomic=False), lambda src:
                             (src.grp_id, msr_name(src)))
    param = dict(maximum_distance=oqparam.maximum_distance,
                 pointsource_distance=oqparam.pointsource_distance,
                 ps_grid_spacing=oqparam.ps_grid_spacing,
                 split_sources=oqparam.split_sources)
    srcfilter = SourceFilter(
        csm.sitecol.reduce(10000) if csm.sitecol else None,
        oqparam.maximum_distance)
    if csm.sitecol:
        logging.info('Sending %s', srcfilter.sitecol)
    if oqparam.ps_grid_spacing:
        # produce a preclassical task for each group
        allargs = ((srcs, srcfilter, param)
                   for srcs in sources_by_grp.values())
    else:
        # produce many preclassical tasks
        maxw = sum(len(srcs) for srcs in sources_by_grp.values()) / (
            oqparam.concurrent_tasks or 1)
        allargs = ((blk, srcfilter, param) for srcs in sources_by_grp.values()
                   for blk in block_splitter(srcs, maxw))
    res = parallel.Starmap(
        preclassical,
        allargs,
        h5=h5,
        distribute=None if len(sources_by_grp) > 1 else 'no').reduce()

    if res and res['before'] != res['after']:
        logging.info(
            'Reduced the number of sources from {:_d} -> {:_d}'.format(
                res['before'], res['after']))

    if res and h5:
        csm.update_source_info(res['calc_times'], nsites=True)

    acc = AccumDict(accum=0)
    code2cls = get_code2cls()
    for grp_id, srcs in res.items():
        # srcs can be empty if the minimum_magnitude filter is on
        if srcs and not isinstance(grp_id, str):
            newsg = SourceGroup(srcs[0].tectonic_region_type)
            newsg.sources = srcs
            csm.src_groups[grp_id] = newsg
            for src in srcs:
                acc[src.code] += int(src.num_ruptures)
    for val, key in sorted((val, key) for key, val in acc.items()):
        cls = code2cls[key].__name__
        logging.info('{} ruptures: {:_d}'.format(cls, val))

    # sanity check
    for sg in csm.src_groups:
        for src in sg:
            assert src.num_ruptures
            assert src.nsites

    # store ps_grid data, if any
    for key, sources in res.items():
        if isinstance(key, str) and key.startswith('ps_grid/'):
            arrays = []
            for ps in sources:
                if hasattr(ps, 'location'):
                    lonlats = [ps.location.x, ps.location.y]
                    for src in getattr(ps, 'pointsources', []):
                        lonlats.extend([src.location.x, src.location.y])
                    arrays.append(F32(lonlats))
            h5[key] = arrays
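
The non-gridded branch above chunks the sources with block_splitter. A minimal sketch, assuming openquake.baselib.general.block_splitter with its default weight of 1 per item:

from openquake.baselib.general import block_splitter

for block in block_splitter(range(5), 2):  # max weight 2 per block
    print(list(block))  # [0, 1] then [2, 3] then [4]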
Example #16
def area2pt_source(area_source_file,
                   sources=None,
                   investigation_time=50,
                   rupture_mesh_spacing=10.,
                   width_of_mfd_bin=0.1,
                   area_source_discretisation=10.,
                   filename=None,
                   nrml_version='04',
                   name=None):
    """Calls OpenQuake parsers to read area source model
    from source_mode.xml type file, convert to point sources
    and write to a new nrml source model file.
    :params area_source_file:
        nrml format file of the area source
    :params discretisation:
        Grid size (km) for the area source discretisation, 
        which defines the distance between resulting point
        sources.
    """
    if sources is None:
        sources = nrml2sourcelist(
            area_source_file,
            investigation_time=investigation_time,
            rupture_mesh_spacing=rupture_mesh_spacing,
            width_of_mfd_bin=width_of_mfd_bin,
            area_source_discretisation=area_source_discretisation)
    if name is None:
        name = '%s_points' % filename
    new_pt_sources = {}
    for source in sources:
        pt_sources = area_to_point_sources(source)
        for pt in pt_sources:
            pt.source_id = pt.source_id.replace(':', '')
            pt.name = pt.name.replace(':', '_')
            try:
                new_pt_sources[pt.tectonic_region_type].append(pt)
            except KeyError:
                new_pt_sources[pt.tectonic_region_type] = [pt]
        # print [method for method in dir(pt) if callable(getattr(pt, method))]
        #  print [attribute for attribute in dir(pt)]
    nrml_pt_file = area_source_file[:-4] + '_pts.xml'
    source_group_list = []
    id = 0
    for trt, sources in new_pt_sources.items():
        source_group = SourceGroup(trt, sources=sources, id=id)
        id += 1
        source_group_list.append(source_group)
    if filename is not None:
        if nrml_version == '04':
            source_list = []
            for trt, sources in new_pt_sources.items():
                for source in sources:
                    source_list.append(source)
            nodes = list(map(obj_to_node, sorted(source_list)))
            source_model = Node("sourceModel", {"name": name}, nodes=nodes)
            with open(nrml_pt_file, 'wb') as f:
                nrml.write([source_model], f, '%s', xmlns=NAMESPACE)
        # This will write version 0.5
        elif nrml_version == '05':
            write_source_model(nrml_pt_file, source_group_list, name=filename)
        else:
            print('Warning: nrml version not specified, xml not created')
    return source_group_list
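
A hypothetical call to area2pt_source (the file name is a placeholder); since a filename is given, the converted point sources are also written to disk as NRML 0.5:

groups = area2pt_source('area_sources.xml',
                        filename='area_sources.xml',
                        nrml_version='05',
                        name='converted_points')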
Example #17
def run_preclassical(calc):
    """
    :param csm: a CompositeSourceModel
    :param oqparam: the parameters in job.ini file
    :param h5: a DataStore instance
    """
    csm = calc.csm
    calc.datastore['trt_smrs'] = csm.get_trt_smrs()
    calc.datastore['toms'] = numpy.array(
        [sg.tom_name for sg in csm.src_groups], hdf5.vstr)
    cmakers = read_cmakers(calc.datastore, csm.full_lt)
    h5 = calc.datastore.hdf5
    calc.sitecol = sites = csm.sitecol if csm.sitecol else None
    # do nothing for atomic sources except counting the ruptures
    atomic_sources = []
    normal_sources = []
    for sg in csm.src_groups:
        grp_id = sg.sources[0].grp_id
        if sg.atomic:
            cmakers[grp_id].set_weight(sg, sites)
            atomic_sources.extend(sg)
        else:
            normal_sources.extend(sg)
    # run preclassical for non-atomic sources
    sources_by_grp = groupby(normal_sources, lambda src:
                             (src.grp_id, msr_name(src)))
    if csm.sitecol:
        logging.info('Sending %s', sites)
    smap = parallel.Starmap(preclassical, h5=h5)
    for (grp_id, msr), srcs in sources_by_grp.items():
        pointsources, pointlike, others = [], [], []
        for src in srcs:
            if hasattr(src, 'location'):
                pointsources.append(src)
            elif hasattr(src, 'nodal_plane_distribution'):
                pointlike.append(src)
            else:
                others.append(src)
        if calc.oqparam.ps_grid_spacing:
            if pointsources or pointlike:
                smap.submit((pointsources + pointlike, sites, cmakers[grp_id]))
        else:
            smap.submit_split((pointsources, sites, cmakers[grp_id]), 10, 100)
            for src in pointlike:  # area, multipoint
                smap.submit(([src], sites, cmakers[grp_id]))
        smap.submit_split((others, sites, cmakers[grp_id]), 10, 100)
    normal = smap.reduce()
    if atomic_sources:  # case_35
        n = len(atomic_sources)
        atomic = AccumDict({'before': n, 'after': n})
        for grp_id, srcs in groupby(atomic_sources,
                                    lambda src: src.grp_id).items():
            atomic[grp_id] = srcs
    else:
        atomic = AccumDict()
    res = normal + atomic
    if res['before'] != res['after']:
        logging.info(
            'Reduced the number of point sources from {:_d} -> {:_d}'.format(
                res['before'], res['after']))
    acc = AccumDict(accum=0)
    code2cls = get_code2cls()
    for grp_id, srcs in res.items():
        # srcs can be empty if the minimum_magnitude filter is on
        if srcs and not isinstance(grp_id, str) and grp_id not in atomic:
            # check if OQ_SAMPLE_SOURCES is set
            ss = os.environ.get('OQ_SAMPLE_SOURCES')
            if ss:
                logging.info('Sampled sources for group #%d', grp_id)
                srcs = general.random_filter(srcs, float(ss)) or [srcs[0]]
            newsg = SourceGroup(srcs[0].tectonic_region_type)
            newsg.sources = srcs
            csm.src_groups[grp_id] = newsg
            for src in srcs:
                assert src.weight
                assert src.num_ruptures
                acc[src.code] += int(src.num_ruptures)
    for val, key in sorted((val, key) for key, val in acc.items()):
        cls = code2cls[key].__name__
        logging.info('{} ruptures: {:_d}'.format(cls, val))

    source_data = zero_times(csm.get_sources())
    calc.store_source_info(source_data)
    # store ps_grid data, if any
    for key, sources in res.items():
        if isinstance(key, str) and key.startswith('ps_grid/'):
            arrays = []
            for ps in sources:
                if hasattr(ps, 'location'):
                    lonlats = [ps.location.x, ps.location.y]
                    for src in getattr(ps, 'pointsources', []):
                        lonlats.extend([src.location.x, src.location.y])
                    arrays.append(F32(lonlats))
            h5[key] = arrays

    h5['full_lt'] = csm.full_lt
    return res
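
A sketch of the AccumDict merge used above (res = normal + atomic), assuming openquake.baselib.general.AccumDict: addition combines the keys and sums the values:

from openquake.baselib.general import AccumDict

normal = AccumDict({'before': 10, 'after': 7})
atomic = AccumDict({'before': 2, 'after': 2})
print(normal + atomic)  # {'before': 12, 'after': 9}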
Example #18
def pmap_from_grp(sources, src_filter, gsims, param, monitor=Monitor()):
    """
    Compute the hazard curves for a set of sources belonging to the same
    tectonic region type for all the GSIMs associated to that TRT.
    The arguments are the same as in :func:`calc_hazard_curves`, except
    for ``gsims``, which is a list of GSIM instances.

    :returns: a ProbabilityMap instance
    """
    if isinstance(sources, SourceGroup):
        group = sources
        sources = group.sources
        trt = sources[0].tectonic_region_type
        mutex_weight = {
            src.source_id: weight
            for src, weight in zip(group.sources, group.srcs_weights)
        }
    else:  # list of sources
        trt = sources[0].tectonic_region_type
        group = SourceGroup(trt, sources, 'src_group', 'indep', 'indep')
    grp_id = sources[0].src_group_id
    maxdist = src_filter.integration_distance
    if hasattr(gsims, 'keys'):  # dictionary trt -> gsim
        gsims = [gsims[trt]]
    srcs = []
    for src in sources:
        if hasattr(src, '__iter__'):  # MultiPointSource
            srcs.extend(src)
        else:
            srcs.append(src)
    del sources
    with GroundShakingIntensityModel.forbid_instantiation():
        imtls = param['imtls']
        trunclevel = param.get('truncation_level')
        cmaker = ContextMaker(gsims, maxdist)
        ctx_mon = monitor('making contexts', measuremem=False)
        pne_mons = [
            monitor('%s.get_poes' % gsim, measuremem=False) for gsim in gsims
        ]
        src_indep = group.src_interdep == 'indep'
        pmap = ProbabilityMap(len(imtls.array), len(gsims))
        pmap.calc_times = []  # quartets (src_id, weight, nsites, dt)
        pmap.grp_id = grp_id
        for src, s_sites in src_filter(srcs):
            t0 = time.time()
            poemap = poe_map(src, s_sites, imtls, cmaker, trunclevel, ctx_mon,
                             pne_mons, group.rup_interdep == 'indep')
            if src_indep:  # usual composition of probabilities
                pmap |= poemap
            else:  # mutually exclusive probabilities
                weight = mutex_weight[src.source_id]
                for sid in poemap:
                    pcurve = pmap.setdefault(sid, 0)
                    pcurve += poemap[sid] * weight
            pmap.calc_times.append(
                (src.source_id, src.weight, len(s_sites), time.time() - t0))
        # storing the number of contributing ruptures too
        pmap.eff_ruptures = {pmap.grp_id: pne_mons[0].counts}
        if group.grp_probability is not None:
            return pmap * group.grp_probability
        return pmap
Example #19
def calc_hazard_curves(groups,
                       srcfilter,
                       imtls,
                       gsim_by_trt,
                       truncation_level=None,
                       apply=sequential_apply,
                       filter_distance='rjb',
                       reqv=None,
                       **kwargs):
    """
    Compute hazard curves on a list of sites, given a set of seismic source
    groups and a dictionary of ground shaking intensity models (one per
    tectonic region type).

    Probability of ground motion exceedance is computed in different ways
    depending on whether the sources are independent or mutually exclusive.

    :param groups:
        A sequence of groups of seismic source objects (instances
        of :class:`~openquake.hazardlib.source.base.BaseSeismicSource`).
    :param srcfilter:
        A source filter over the site collection or the site collection itself
    :param imtls:
        Dictionary mapping intensity measure type strings
        to lists of intensity measure levels.
    :param gsim_by_trt:
        Dictionary mapping tectonic region types (members
        of :class:`openquake.hazardlib.const.TRT`) to
        :class:`~openquake.hazardlib.gsim.base.GMPE` or
        :class:`~openquake.hazardlib.gsim.base.IPE` objects.
    :param truncation_level:
        Float, number of standard deviations for truncation of the intensity
        distribution.
    :param apply:
        apply function to use (default sequential_apply)
    :param filter_distance:
        The distance used to filter the ruptures (default rjb)
    :param reqv:
        If not None, an instance of RjbEquivalent
    :returns:
        An array of size N, where N is the number of sites, whose elements
        are records with fields given by the intensity measure types; the
        size of each field is given by the number of levels in ``imtls``.
    """
    # This is ensuring backward compatibility i.e. processing a list of
    # sources
    if not isinstance(groups[0], SourceGroup):  # sent a list of sources
        odic = groupby(groups, operator.attrgetter('tectonic_region_type'))
        groups = [
            SourceGroup(trt, odic[trt], 'src_group', 'indep', 'indep')
            for trt in odic
        ]
    # ensure the sources have the right src_group_id
    for i, grp in enumerate(groups):
        for src in grp:
            if src.src_group_id is None:
                src.src_group_id = i
    imtls = DictArray(imtls)
    shift_hypo = kwargs.get('shift_hypo', False)
    param = dict(imtls=imtls,
                 truncation_level=truncation_level,
                 filter_distance=filter_distance,
                 reqv=reqv,
                 cluster=grp.cluster,
                 shift_hypo=shift_hypo)
    pmap = ProbabilityMap(len(imtls.array), 1)
    # Processing groups with homogeneous tectonic region
    gsim = gsim_by_trt[groups[0][0].tectonic_region_type]
    mon = Monitor()
    for group in groups:
        if group.atomic:  # do not split
            it = [classical(group, srcfilter, [gsim], param, mon)]
        else:  # split the group and apply `classical` in parallel
            it = apply(classical,
                       (group.sources, srcfilter, [gsim], param, mon),
                       weight=operator.attrgetter('weight'))
        for dic in it:
            for grp_id, pval in dic['pmap'].items():
                pmap |= pval
    sitecol = getattr(srcfilter, 'sitecol', srcfilter)
    return pmap.convert(imtls, len(sitecol.complete))
Example #20
    def execute(self):
        """
        Run in parallel `core_task(sources, sitecol, monitor)`, by
        parallelizing on the sources according to their weight and
        tectonic region type.
        """
        oq = self.oqparam
        if oq.hazard_calculation_id and not oq.compare_with_classical:
            with util.read(self.oqparam.hazard_calculation_id) as parent:
                self.full_lt = parent['full_lt']
            self.calc_stats()  # post-processing
            return {}

        assert oq.max_sites_per_tile > oq.max_sites_disagg, (
            oq.max_sites_per_tile, oq.max_sites_disagg)
        psd = self.set_psd()
        srcfilter = self.src_filter()
        performance.Monitor.save(self.datastore, 'srcfilter', srcfilter)
        srcs = self.csm.get_sources(atomic=False)
        if srcs:
            res = parallel.Starmap.apply(preclassical, (srcs, self.params),
                                         concurrent_tasks=oq.concurrent_tasks
                                         or 1,
                                         h5=self.datastore.hdf5).reduce()

            if oq.calculation_mode == 'preclassical':
                self.store_source_info(res['calc_times'], nsites=True)
                self.datastore['full_lt'] = self.csm.full_lt
                self.datastore.swmr_on()  # fixes HDF5 error in build_hazard
                return

            self.update_source_info(res['calc_times'], nsites=True)
            sources_by_grp = groupby(res['sources'],
                                     operator.attrgetter('grp_id'))
        else:
            for src in self.csm.get_sources(atomic=True):
                src.num_ruptures = src.count_ruptures()
                src.nsites = self.N
            sources_by_grp = {}
        self.csm.src_groups = [sg for sg in self.csm.src_groups if sg.atomic]
        if oq.ps_grid_spacing:
            smap = parallel.Starmap(
                grid_point_sources,
                h5=self.datastore.hdf5,
                distribute=None if len(sources_by_grp) > 1 else 'no')
            for grp_id, sources in sources_by_grp.items():
                smap.submit((sources, oq.ps_grid_spacing))
            dic = smap.reduce()
            before, after = 0, 0
            for grp_id, sources in sources_by_grp.items():
                before += len(sources)
                after += len(dic[grp_id])
                sg = SourceGroup(sources[0].tectonic_region_type)
                sg.sources = dic[grp_id]
                self.csm.src_groups.append(sg)
            logging.info('Reduced point sources %d->%d', before, after)
        else:
            for grp_id, sources in sources_by_grp.items():
                sg = SourceGroup(sources[0].tectonic_region_type)
                sg.sources = sources
                self.csm.src_groups.append(sg)
        smap = parallel.Starmap(classical, h5=self.datastore.hdf5)
        self.submit_tasks(smap)
        acc0 = self.acc0()  # create the rup/ datasets BEFORE swmr_on()
        self.datastore.swmr_on()
        smap.h5 = self.datastore.hdf5
        self.calc_times = AccumDict(accum=numpy.zeros(3, F32))
        try:
            acc = smap.reduce(self.agg_dicts, acc0)
            self.store_rlz_info(acc.eff_ruptures)
        finally:
            source_ids = self.store_source_info(self.calc_times)
            if self.by_task:
                logging.info('Storing by_task information')
                num_tasks = (max(self.by_task) + 1,)  # dataset shape
                er = self.datastore.create_dset('by_task/eff_ruptures', U32,
                                                num_tasks)
                es = self.datastore.create_dset('by_task/eff_sites', U32,
                                                num_tasks)
                si = self.datastore.create_dset('by_task/srcids',
                                                hdf5.vstr,
                                                num_tasks,
                                                fillvalue=None)
                for task_no, rec in self.by_task.items():
                    effrups, effsites, srcids = rec
                    er[task_no] = effrups
                    es[task_no] = effsites
                    si[task_no] = ' '.join(source_ids[s] for s in srcids)
                self.by_task.clear()
        if self.calc_times:  # can be empty in case of errors
            self.numrups = sum(arr[0] for arr in self.calc_times.values())
            numsites = sum(arr[1] for arr in self.calc_times.values())
            logging.info('Effective number of ruptures: {:_d}/{:_d}'.format(
                int(self.numrups), self.totrups))
            logging.info('Effective number of sites per rupture: %d',
                         numsites / self.numrups)
        if psd:
            psdist = max(max(psd.ddic[trt].values()) for trt in psd.ddic)
            if psdist and self.maxradius >= psdist / 2:
                logging.warning(
                    'The pointsource_distance of %d km is too '
                    'small compared to a maxradius of %d km', psdist,
                    self.maxradius)
        self.calc_times.clear()  # save a bit of memory
        return acc
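
The calc_times accumulator above is built as AccumDict(accum=numpy.zeros(3, F32)), so missing keys start from a zero array and can be summed in place. A minimal sketch of that pattern:

import numpy
from openquake.baselib.general import AccumDict

calc_times = AccumDict(accum=numpy.zeros(3))
calc_times['src_A'] += numpy.array([10., 5., 0.25])  # nrups, nsites, dt
calc_times['src_A'] += numpy.array([4., 5., 0.5])
print(calc_times['src_A'])  # [14. 10. 0.75]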