class RuptureData(object):
    """
    Container for information about the ruptures of a given
    tectonic region type.

    :param trt: tectonic region type string
    :param gsims: GSIMs for the given tectonic region type
    """
    def __init__(self, trt, gsims):
        self.trt = trt
        self.cmaker = ContextMaker(gsims)
        # extra rupture parameters required by the GSIMs, beyond the
        # ones stored explicitly in the dtype below
        self.params = sorted(self.cmaker.REQUIRES_RUPTURE_PARAMETERS -
                             set('mag strike dip rake hypo_depth'.split()))
        self.dt = numpy.dtype(
            [('rup_id', U32), ('multiplicity', U16), ('eidx', U32),
             ('numsites', U32), ('occurrence_rate', F64), ('mag', F32),
             ('lon', F32), ('lat', F32), ('depth', F32), ('strike', F32),
             ('dip', F32), ('rake', F32), ('boundary', hdf5.vstr)] +
            [(param, F32) for param in self.params])

    def to_array(self, ebruptures):
        """
        Convert a list of ebruptures into an array of dtype RuptureData.dt
        """
        data = []
        for ebr in ebruptures:
            rup = ebr.rupture
            rc = self.cmaker.make_rupture_context(rup)
            ruptparams = tuple(getattr(rc, param) for param in self.params)
            point = rup.surface.get_middle_point()
            multi_lons, multi_lats = rup.surface.get_surface_boundaries()
            # one '((lon lat,lon lat,...))' ring per surface boundary
            bounds = ','.join('((%s))' % ','.join(
                '%.5f %.5f' % (lon, lat) for lon, lat in zip(lons, lats))
                for lons, lats in zip(multi_lons, multi_lats))
            try:
                rate = ebr.rupture.occurrence_rate
            except AttributeError:  # for nonparametric sources
                rate = numpy.nan
            data.append(
                (ebr.serial, ebr.multiplicity, ebr.eidx1, len(ebr.sids),
                 rate, rup.mag, point.x, point.y, point.z,
                 rup.surface.get_strike(), rup.surface.get_dip(), rup.rake,
                 'MULTIPOLYGON(%s)' % decode(bounds)) + ruptparams)
        return numpy.array(data, self.dt)
class RuptureData(object):
    """
    Container for information about the ruptures of a given
    tectonic region type.
    """
    def __init__(self, trt, gsims):
        self.trt = trt
        self.cmaker = ContextMaker(gsims)
        # parameters stored explicitly in the dtype are excluded
        explicit = set('mag strike dip rake hypo_depth'.split())
        self.params = sorted(
            self.cmaker.REQUIRES_RUPTURE_PARAMETERS - explicit)
        fields = [
            ('rupserial', U32), ('multiplicity', U16), ('numsites', U32),
            ('occurrence_rate', F64), ('mag', F64), ('lon', F32),
            ('lat', F32), ('depth', F32), ('strike', F64), ('dip', F64),
            ('rake', F64), ('boundary', hdf5.vstr)]
        fields.extend((param, F64) for param in self.params)
        self.dt = numpy.dtype(fields)

    def to_array(self, ebruptures):
        """
        Convert a list of ebruptures into an array of dtype `self.dt`
        """
        rows = []
        for ebr in ebruptures:
            rup = ebr.rupture
            surf = rup.surface
            ctx = self.cmaker.make_rupture_context(rup)
            extra = tuple(getattr(ctx, param) for param in self.params)
            midpoint = surf.get_middle_point()
            all_lons, all_lats = surf.get_surface_boundaries()
            # one '((lon lat,lon lat,...))' ring per surface boundary
            rings = []
            for lons, lats in zip(all_lons, all_lats):
                ring = ','.join(
                    '%.5f %.5f' % pair for pair in zip(lons, lats))
                rings.append('((%s))' % ring)
            boundary = ','.join(rings)
            # nonparametric sources have no occurrence_rate attribute
            rate = getattr(ebr.rupture, 'occurrence_rate', numpy.nan)
            rows.append(
                (ebr.serial, ebr.multiplicity, len(ebr.sids), rate,
                 rup.mag, midpoint.x, midpoint.y, midpoint.z,
                 surf.get_strike(), surf.get_dip(), rup.rake,
                 decode(boundary)) + extra)
        return numpy.array(rows, self.dt)
def compute_ruptures(sources, sitecol, siteidx, rlzs_assoc, monitor):
    """
    :param sources: List of commonlib.source.Source tuples
    :param sitecol: a :class:`openquake.hazardlib.site.SiteCollection` instance
    :param siteidx: always equal to 0
    :param rlzs_assoc: a :class:`openquake.commonlib.source.RlzsAssoc` instance
    :param monitor: monitor instance
    :returns: a dictionary trt_model_id -> [Rupture instances]
    """
    assert siteidx == 0, (
        'siteidx can be nonzero only for the classical_tiling calculations: '
        'tiling with the EventBasedRuptureCalculator is an error')
    # NB: by construction each block is a non-empty list with
    # sources of the same trt_model_id
    trt_model_id = sources[0].trt_model_id
    oq = monitor.oqparam
    trt = sources[0].tectonic_region_type
    max_dist = oq.maximum_distance[trt]
    cmaker = ContextMaker(rlzs_assoc.gsims_by_trt_id[trt_model_id])
    # sort the parameters so that the dtype field order is deterministic:
    # REQUIRES_RUPTURE_PARAMETERS is a set and iterating it directly would
    # produce a different field order across runs (hash randomization)
    params = sorted(cmaker.REQUIRES_RUPTURE_PARAMETERS)
    rup_data_dt = numpy.dtype(
        [('rupserial', U32), ('multiplicity', U16),
         ('numsites', U32)] + [(param, F32) for param in params])
    eb_ruptures = []
    rup_data = []
    calc_times = []
    rup_mon = monitor('filtering ruptures', measuremem=False)
    num_samples = rlzs_assoc.samples[trt_model_id]
    # Compute and save stochastic event sets
    for src in sources:
        t0 = time.time()
        s_sites = src.filter_sites_by_distance_to_source(max_dist, sitecol)
        if s_sites is None:  # all sites are beyond the integration distance
            continue
        rupture_filter = functools.partial(
            filter_sites_by_distance_to_rupture,
            integration_distance=max_dist, sites=s_sites)
        num_occ_by_rup = sample_ruptures(
            src, oq.ses_per_logic_tree_path, num_samples, rlzs_assoc.seed)
        # NB: the number of occurrences is very low, << 1, so it is
        # more efficient to filter only the ruptures that occur, i.e.
        # to call sample_ruptures *before* the filtering
        for ebr in build_eb_ruptures(
                src, num_occ_by_rup, rupture_filter, oq.random_seed, rup_mon):
            nsites = len(ebr.indices)
            rc = cmaker.make_rupture_context(ebr.rupture)
            ruptparams = tuple(getattr(rc, param) for param in params)
            rup_data.append((ebr.serial, len(ebr.etags), nsites) + ruptparams)
            eb_ruptures.append(ebr)
        dt = time.time() - t0
        calc_times.append((src.id, dt))
    res = AccumDict({trt_model_id: eb_ruptures})
    res.calc_times = calc_times
    res.rup_data = numpy.array(rup_data, rup_data_dt)
    res.trt = trt
    return res
def compute_ruptures(sources, sitecol, siteidx, rlzs_assoc, monitor):
    """
    Sample stochastic event sets for a homogeneous block of sources.

    :param sources: List of commonlib.source.Source tuples
    :param sitecol: a :class:`openquake.hazardlib.site.SiteCollection` instance
    :param siteidx: always equal to 0
    :param rlzs_assoc: a :class:`openquake.commonlib.source.RlzsAssoc` instance
    :param monitor: monitor instance
    :returns: a dictionary src_group_id -> [Rupture instances]
    """
    assert siteidx == 0, (
        'siteidx can be nonzero only for the classical_tiling calculations: '
        'tiling with the EventBasedRuptureCalculator is an error')
    # by construction the block is non-empty and all of its sources
    # share the same src_group_id
    grp_id = sources[0].src_group_id
    trt = sources[0].tectonic_region_type
    oq = monitor.oqparam
    integration_distance = oq.maximum_distance[trt]
    cmaker = ContextMaker(rlzs_assoc.gsims_by_grp_id[grp_id])
    # sorted for a deterministic dtype field order
    params = sorted(cmaker.REQUIRES_RUPTURE_PARAMETERS)
    fields = [('rupserial', U32), ('multiplicity', U16),
              ('numsites', U32), ('occurrence_rate', F32)]
    fields += [(param, F32) for param in params]
    rup_data_dt = numpy.dtype(fields)
    ebrs = []
    rows = []
    calc_times = []
    rup_mon = monitor('filtering ruptures', measuremem=False)
    num_samples = rlzs_assoc.samples[grp_id]
    # compute and save stochastic event sets, one source at a time
    for src in sources:
        t0 = time.time()
        close_sites = src.filter_sites_by_distance_to_source(
            integration_distance, sitecol)
        if close_sites is None:  # every site is too far from the source
            continue
        rupfilter = functools.partial(
            filter_sites_by_distance_to_rupture,
            integration_distance=integration_distance, sites=close_sites)
        num_occ_by_rup = sample_ruptures(
            src, oq.ses_per_logic_tree_path, num_samples, rlzs_assoc.seed)
        # sampling comes first on purpose: the expected number of
        # occurrences per rupture is << 1, so it is cheaper to filter
        # only the ruptures that actually occur
        for ebr in build_eb_ruptures(
                src, num_occ_by_rup, rupfilter, oq.random_seed, rup_mon):
            # nonparametric sources have no occurrence_rate attribute
            rate = getattr(ebr.rupture, 'occurrence_rate', numpy.nan)
            rctx = cmaker.make_rupture_context(ebr.rupture)
            rows.append(
                (ebr.serial, len(ebr.etags), len(ebr.indices), rate) +
                tuple(getattr(rctx, param) for param in params))
            ebrs.append(ebr)
        calc_times.append((src.id, time.time() - t0))
    res = AccumDict({grp_id: ebrs})
    res.calc_times = calc_times
    res.rup_data = numpy.array(rows, rup_data_dt)
    res.trt = trt
    return res
def compute_ruptures(sources, sitecol, gsims, monitor):
    """
    Sample stochastic event sets for a homogeneous block of sources.

    :param sources: List of commonlib.source.Source tuples
    :param sitecol: a :class:`openquake.hazardlib.site.SiteCollection` instance
    :param gsims: a list of GSIMs for the current tectonic region model
    :param monitor: monitor instance
    :returns: a dictionary src_group_id -> [Rupture instances]
    """
    # by construction the block is non-empty and all of its sources
    # share the same src_group_id
    grp_id = sources[0].src_group_id
    trt = sources[0].tectonic_region_type
    integration_distance = monitor.maximum_distance[trt]
    cmaker = ContextMaker(gsims)
    # sorted for a deterministic dtype field order
    params = sorted(cmaker.REQUIRES_RUPTURE_PARAMETERS)
    rup_data_dt = numpy.dtype(
        [('rupserial', U32), ('multiplicity', U16), ('numsites', U32),
         ('occurrence_rate', F64)] + [(param, F64) for param in params])
    ebrs, rows, calc_times = [], [], []
    rup_mon = monitor('filtering ruptures', measuremem=False)
    num_samples = monitor.samples
    num_events = 0
    # compute and save stochastic event sets, one source at a time
    for src in sources:
        t0 = time.time()
        close_sites = src.filter_sites_by_distance_to_source(
            integration_distance, sitecol)
        if close_sites is None:  # every site is too far from the source
            continue
        rupfilter = functools.partial(
            filter_sites_by_distance_to_rupture,
            integration_distance=integration_distance, sites=close_sites)
        num_occ_by_rup = sample_ruptures(
            src, monitor.ses_per_logic_tree_path, num_samples, monitor.seed)
        # sampling comes first on purpose: the expected number of
        # occurrences per rupture is << 1, so it is cheaper to filter
        # only the ruptures that actually occur
        for ebr in build_eb_ruptures(
                src, num_occ_by_rup, rupfilter, monitor.seed, rup_mon):
            # nonparametric sources have no occurrence_rate attribute
            rate = getattr(ebr.rupture, 'occurrence_rate', numpy.nan)
            rctx = cmaker.make_rupture_context(ebr.rupture)
            rows.append(
                (ebr.serial, ebr.multiplicity, len(ebr.indices), rate) +
                tuple(getattr(rctx, param) for param in params))
            ebrs.append(ebr)
            num_events += ebr.multiplicity
        calc_times.append((src.id, time.time() - t0))
    res = AccumDict({grp_id: ebrs})
    res.num_events = num_events
    res.calc_times = calc_times
    res.rup_data = numpy.array(rows, rup_data_dt)
    res.trt = trt
    return res