Example #1
 def get_ruptures(self, srcfilter=calc.filters.nofilter, min_mag=0):
     """
     :returns: a list of EBRuptures filtered by bounding box
     """
     ebrs = []
     with datastore.read(self.filename) as dstore:
         rupgeoms = dstore['rupgeoms']
         for e0, rec in zip(self.e0, self.rup_array):
             if rec['mag'] < min_mag:
                 continue
             if srcfilter.integration_distance:
                 sids = srcfilter.close_sids(rec, self.trt)
                 if len(sids) == 0:  # the rupture is far away
                     continue
             else:
                 sids = None
             geom = rupgeoms[rec['gidx1']:rec['gidx2']].reshape(
                 rec['sx'], rec['sy'])
             rupture = get_rupture(rec, geom, self.trt)
             grp_id = rec['grp_id']
             ebr = EBRupture(rupture, rec['srcidx'], grp_id, rec['n_occ'],
                             self.samples)
             # not implemented: rupture_slip_direction
             ebr.sids = sids
             ebr.e0 = 0 if self.e0 is None else e0
             ebr.id = rec['id']  # rup_id  in the datastore
             ebrs.append(ebr)
     return ebrs
Example #2
 def pre_execute(self):
     """
     Read the site collection and initialize GmfComputer and seeds
     """
     oq = self.oqparam
     cinfo = source.CompositionInfo.fake(readinput.get_gsim_lt(oq))
     self.datastore['csm_info'] = cinfo
     if 'rupture_model' not in oq.inputs:
          logging.warning(
              'There is no rupture_model, the calculator will just '
              'import data without performing any calculation')
         super().pre_execute()
         return
     self.rup = readinput.get_rupture(oq)
     self.gsims = readinput.get_gsims(oq)
     R = len(self.gsims)
     self.cmaker = ContextMaker(self.gsims, oq.maximum_distance,
                                {'filter_distance': oq.filter_distance})
     super().pre_execute()
     self.datastore['oqparam'] = oq
     self.rlzs_assoc = cinfo.get_rlzs_assoc()
     rlzs_by_gsim = self.rlzs_assoc.get_rlzs_by_gsim(0)
     E = oq.number_of_ground_motion_fields
     n_occ = numpy.array([E])
     ebr = EBRupture(self.rup, 0, 0, self.sitecol.sids, n_occ)
     events = numpy.zeros(E * R, events_dt)
     for rlz, eids in ebr.get_eids_by_rlz(rlzs_by_gsim).items():
         events[rlz * E:rlz * E + E]['eid'] = eids
         events[rlz * E:rlz * E + E]['rlz'] = rlz
     self.datastore['events'] = events
     rupser = calc.RuptureSerializer(self.datastore)
     rupser.save(get_rup_array([ebr]))
     rupser.close()
     self.computer = GmfComputer(ebr, self.sitecol, oq.imtls, self.cmaker,
                                 oq.truncation_level, oq.correl_model)
Example #3
def build_eb_ruptures(src, rlzs, num_ses, cmaker, s_sites, rup_n_occ=()):
    """
    :param src: a source object
    :param rlzs: realizations of the source model as numpy.uint16 numbers
    :param num_ses: number of stochastic event sets
    :param cmaker: a ContextMaker instance
    :param s_sites: a (filtered) site collection
    :param rup_n_occ: (rup, n_occ) pairs [inferred from the source]
    :returns: a list of EBRuptures
    """
    # NB: s_sites can be None if cmaker.maximum_distance is False, then
    # the contexts are not computed and the ruptures not filtered
    ebrs = []
    samples = getattr(src, 'samples', 1)
    nr = len(rlzs)
    if rup_n_occ == ():
        rup_n_occ = src.sample_ruptures(samples, num_ses, cmaker.ir_mon)
    for rup, n_occ in rup_n_occ:
        if cmaker.maximum_distance:
            with cmaker.ctx_mon:
                try:
                    rup.sctx, rup.dctx = cmaker.make_contexts(s_sites, rup)
                    indices = rup.sctx.sids
                except FarAwayRupture:
                    continue
        else:
            indices = ()

        if not hasattr(src, 'samples'):  # full enumeration
            n_occ = fix_shape(n_occ, nr)

        # creating events
        with cmaker.evs_mon:
            occ = n_occ.sum(axis=1)  # occurrences by sam_idx
            E = occ.sum()
            if E == 0:
                continue
            assert E < TWO16, E
            events = numpy.zeros(E, event_dt)
            events['grp_id'] = src.src_group_id
            i = 0
            for sam_idx in range(nr):  # numpy.ndenumerate would be slower
                for ses_idx, num_occ in enumerate(n_occ[sam_idx]):
                    for _ in range(num_occ):
                        events[i]['sample'] = sam_idx
                        events[i]['ses'] = ses_idx + 1
                        i += 1

        # setting event IDs based on the rupture serial and the sample
        ebr = EBRupture(rup, src.id, indices, events)
        start = 0
        for sam_idx, n in enumerate(occ):
            eids = (U64(TWO32 * ebr.serial + TWO16 * rlzs[sam_idx]) +
                    numpy.arange(n, dtype=U64))
            ebr.events[start:start + len(eids)]['eid'] = eids
            start += len(eids)
        ebrs.append(ebr)

    return ebrs
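
Example #3 builds event IDs from the rupture serial and the realization as TWO32 * serial + TWO16 * rlz plus a running counter. Below is a minimal, self-contained sketch of that encoding; the constants and dtype alias mirror the names used in the snippet but are defined locally here, not imported from the engine.

import numpy

U64 = numpy.uint64
TWO16 = 2 ** 16  # assumed to match the snippet's constants
TWO32 = 2 ** 32

def encode_eids(serial, rlz, n):
    """Return n consecutive event IDs for a (rupture serial, realization) pair."""
    base = U64(TWO32 * serial + TWO16 * rlz)
    return base + numpy.arange(n, dtype=U64)

eids = encode_eids(serial=7, rlz=3, n=4)
# both the serial and the realization can be recovered from any eid
assert all(int(eid) // TWO32 == 7 for eid in eids)
assert all(int(eid) % TWO32 // TWO16 == 3 for eid in eids)
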
Example #4
 def __iter__(self):
     self.dstore.open('r')  # if needed
     attrs = self.dstore.get_attrs('ruptures')
     code2cls = {}  # code -> rupture_cls, surface_cls
     for key, val in attrs.items():
         if key.startswith('code_'):
             code2cls[int(key[5:])] = [classes[v] for v in val.split()]
     grp_trt = self.dstore['csm_info'].grp_by("trt")
     events = self.dstore['events']
     ruptures = self.dstore['ruptures'][self.mask]
     rupgeoms = self.dstore['rupgeoms'][self.mask]
      # NB: ruptures.sort(order='serial') sometimes causes a SystemError:
      # <ufunc 'greater'> returned a result with an error set
      # this is why I am sorting with Python and not with numpy below
     data = sorted(
         (serial, ridx) for ridx, serial in enumerate(ruptures['serial']))
     for serial, ridx in data:
         rec = ruptures[ridx]
         evs = events[rec['eidx1']:rec['eidx2']]
         if self.grp_id is not None and self.grp_id != rec['grp_id']:
             continue
         mesh = numpy.zeros((3, rec['sy'], rec['sz']), F32)
         for i, arr in enumerate(rupgeoms[ridx]):  # i = 0, 1, 2
             mesh[i] = arr.reshape(rec['sy'], rec['sz'])
         rupture_cls, surface_cls = code2cls[rec['code']]
         rupture = object.__new__(rupture_cls)
         rupture.serial = serial
         rupture.surface = object.__new__(surface_cls)
         rupture.mag = rec['mag']
         rupture.rake = rec['rake']
         rupture.seed = rec['seed']
         rupture.hypocenter = geo.Point(*rec['hypo'])
         rupture.occurrence_rate = rec['occurrence_rate']
         rupture.tectonic_region_type = grp_trt[rec['grp_id']]
         pmfx = rec['pmfx']
         if pmfx != -1:
             rupture.pmf = self.dstore['pmfs'][pmfx]
         if surface_cls is geo.PlanarSurface:
             rupture.surface = geo.PlanarSurface.from_array(mesh[:, 0, :])
         elif surface_cls is geo.MultiSurface:
             # mesh has shape (3, n, 4)
             rupture.surface.__init__([
                 geo.PlanarSurface.from_array(mesh[:, i, :])
                 for i in range(mesh.shape[1])
             ])
         elif surface_cls is geo.GriddedSurface:
             # fault surface, strike and dip will be computed
             rupture.surface.strike = rupture.surface.dip = None
             rupture.surface.mesh = Mesh(*mesh)
         else:
             # fault surface, strike and dip will be computed
             rupture.surface.strike = rupture.surface.dip = None
             rupture.surface.__init__(RectangularMesh(*mesh))
         ebr = EBRupture(rupture, (), evs)
         ebr.eidx1 = rec['eidx1']
         ebr.eidx2 = rec['eidx2']
         # not implemented: rupture_slip_direction
         yield ebr
Example #5
    def _read_scenario_ruptures(self):
        oq = self.oqparam
        gsim_lt = readinput.get_gsim_lt(self.oqparam)
        G = gsim_lt.get_num_paths()
        if oq.calculation_mode.startswith('scenario'):
            ngmfs = oq.number_of_ground_motion_fields
        if oq.inputs['rupture_model'].endswith('.xml'):
            self.gsims = [gsim_rlz.value[0] for gsim_rlz in gsim_lt]
            self.cmaker = ContextMaker('*', self.gsims, {
                'maximum_distance': oq.maximum_distance,
                'imtls': oq.imtls
            })
            rup = readinput.get_rupture(oq)
            if self.N > oq.max_sites_disagg:  # many sites, split rupture
                ebrs = [
                    EBRupture(copyobj(rup, rup_id=rup.rup_id + i),
                              0,
                              0,
                              G,
                              e0=i * G) for i in range(ngmfs)
                ]
            else:  # keep a single rupture with a big occupation number
                ebrs = [EBRupture(rup, 0, 0, G * ngmfs, rup.rup_id)]
            aw = get_rup_array(ebrs, self.srcfilter)
            if len(aw) == 0:
                raise RuntimeError(
                    'The rupture is too far from the sites! Please check the '
                    'maximum_distance and the position of the rupture')
        elif oq.inputs['rupture_model'].endswith('.csv'):
            aw = readinput.get_ruptures(oq.inputs['rupture_model'])
            num_gsims = numpy.array(
                [len(gsim_lt.values[trt]) for trt in gsim_lt.values], U32)
            if oq.calculation_mode.startswith('scenario'):
                # rescale n_occ
                aw['n_occ'] *= ngmfs * num_gsims[aw['trt_smr']]
        rup_array = aw.array
        hdf5.extend(self.datastore['rupgeoms'], aw.geom)

        if len(rup_array) == 0:
            raise RuntimeError(
                'There are no sites within the maximum_distance'
                ' of %s km from the rupture' %
                oq.maximum_distance(rup.tectonic_region_type, rup.mag))

        # check the number of branchsets
        branchsets = len(gsim_lt._ltnode)
        if len(rup_array) == 1 and branchsets > 1:
            raise InvalidFile(
                '%s for a scenario calculation must contain a single '
                'branchset, found %d!' % (oq.inputs['job_ini'], branchsets))

        fake = logictree.FullLogicTree.fake(gsim_lt)
        self.realizations = fake.get_realizations()
        self.datastore['full_lt'] = fake
        self.store_rlz_info({})  # store weights
        self.save_params()
        imp = calc.RuptureImporter(self.datastore)
        imp.import_rups_events(rup_array, get_rupture_getters)
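
In Example #5, a scenario rupture is either split into ngmfs EBRuptures with n_occ = G and e0 = i * G, or kept as a single EBRupture with n_occ = G * ngmfs. A toy sketch (plain tuples standing in for EBRupture) showing that both layouts cover the same range of event indices:

def split_layout(ngmfs, G):
    # one (n_occ, e0) pair per ground motion field
    return [(G, i * G) for i in range(ngmfs)]

def single_layout(ngmfs, G):
    # a single pair with a big occupation number
    return [(G * ngmfs, 0)]

def event_indices(layout):
    out = []
    for n_occ, e0 in layout:
        out.extend(range(e0, e0 + n_occ))
    return out

ngmfs, G = 5, 3
assert event_indices(split_layout(ngmfs, G)) == event_indices(single_layout(ngmfs, G))
assert len(event_indices(split_layout(ngmfs, G))) == ngmfs * G
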
Example #6
 def __iter__(self):
     self.dstore.open()  # if needed
     oq = self.dstore['oqparam']
     grp_trt = self.dstore['csm_info'].grp_by("trt")
     ruptures = self.dstore['ruptures'][self.mask]
      # NB: ruptures.sort(order='serial') sometimes causes a SystemError:
      # <ufunc 'greater'> returned a result with an error set
      # this is why I am sorting with Python and not with numpy below
     data = sorted((ser, idx) for idx, ser in enumerate(ruptures['serial']))
     for serial, ridx in data:
         rec = ruptures[ridx]
         evs = self.dstore['events'][rec['eidx1']:rec['eidx2']]
         if self.grp_id is not None and self.grp_id != rec['grp_id']:
             continue
         mesh = rec['points'].reshape(rec['sx'], rec['sy'], rec['sz'])
         rupture_cls, surface_cls, source_cls = BaseRupture.types[
             rec['code']]
         rupture = object.__new__(rupture_cls)
         rupture.surface = object.__new__(surface_cls)
         # MISSING: case complex_fault_mesh_spacing != rupture_mesh_spacing
         if 'Complex' in surface_cls.__name__:
             mesh_spacing = oq.complex_fault_mesh_spacing
         else:
             mesh_spacing = oq.rupture_mesh_spacing
         rupture.source_typology = source_cls
         rupture.mag = rec['mag']
         rupture.rake = rec['rake']
         rupture.seed = rec['seed']
         rupture.hypocenter = geo.Point(*rec['hypo'])
         rupture.occurrence_rate = rec['occurrence_rate']
         rupture.tectonic_region_type = grp_trt[rec['grp_id']]
         pmfx = rec['pmfx']
         # disable check on PlanarSurface to support UCERF ruptures
         with mock.patch(
                 'openquake.hazardlib.geo.surface.PlanarSurface.'
                 'IMPERFECT_RECTANGLE_TOLERANCE', numpy.inf):
             if pmfx != -1:
                 rupture.pmf = self.dstore['pmfs'][pmfx]
             if surface_cls is geo.PlanarSurface:
                 rupture.surface = geo.PlanarSurface.from_array(
                     mesh_spacing, rec['points'])
             elif surface_cls.__name__.endswith('MultiSurface'):
                 rupture.surface.__init__([
                     geo.PlanarSurface.from_array(mesh_spacing,
                                                  m1.flatten())
                     for m1 in mesh
                 ])
             else:  # fault surface, strike and dip will be computed
                 rupture.surface.strike = rupture.surface.dip = None
                 m = mesh[0]
                 rupture.surface.mesh = RectangularMesh(
                     m['lon'], m['lat'], m['depth'])
         ebr = EBRupture(rupture, (), evs, serial)
         ebr.eidx1 = rec['eidx1']
         ebr.eidx2 = rec['eidx2']
         # not implemented: rupture_slip_direction
         yield ebr
Example #7
 def get_ruptures(self, srcfilter):
     """
     :returns: a list of EBRuptures filtered by bounding box
     """
     ebrs = []
     with datastore.read(self.filename) as dstore:
         rupgeoms = dstore['rupgeoms']
         for e0, rec in zip(self.e0, self.rup_array):
             if srcfilter.integration_distance:
                 sids = srcfilter.close_sids(rec, self.trt)
                 if len(sids) == 0:  # the rupture is far away
                     continue
             else:
                 sids = None
             mesh = numpy.zeros((3, rec['sy'], rec['sz']), F32)
             geom = rupgeoms[rec['gidx1']:rec['gidx2']].reshape(
                 rec['sy'], rec['sz'])
             mesh[0] = geom['lon']
             mesh[1] = geom['lat']
             mesh[2] = geom['depth']
             rupture_cls, surface_cls = code2cls[rec['code']]
             rupture = object.__new__(rupture_cls)
             rupture.rup_id = rec['serial']
             rupture.surface = object.__new__(surface_cls)
             rupture.mag = rec['mag']
             rupture.rake = rec['rake']
             rupture.hypocenter = geo.Point(*rec['hypo'])
             rupture.occurrence_rate = rec['occurrence_rate']
             rupture.tectonic_region_type = self.trt
             if surface_cls is geo.PlanarSurface:
                 rupture.surface = geo.PlanarSurface.from_array(mesh[:,
                                                                     0, :])
             elif surface_cls is geo.MultiSurface:
                 # mesh has shape (3, n, 4)
                 rupture.surface.__init__([
                     geo.PlanarSurface.from_array(mesh[:, i, :])
                     for i in range(mesh.shape[1])
                 ])
             elif surface_cls is geo.GriddedSurface:
                 # fault surface, strike and dip will be computed
                 rupture.surface.strike = rupture.surface.dip = None
                 rupture.surface.mesh = Mesh(*mesh)
             else:
                 # fault surface, strike and dip will be computed
                 rupture.surface.strike = rupture.surface.dip = None
                 rupture.surface.__init__(RectangularMesh(*mesh))
             grp_id = rec['grp_id']
             ebr = EBRupture(rupture, rec['srcidx'], grp_id, rec['n_occ'],
                             self.samples)
             # not implemented: rupture_slip_direction
             ebr.sids = sids
             ebr.ridx = rec['id']
             ebr.e0 = 0 if self.e0 is None else e0
             ebrs.append(ebr)
     return ebrs
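
Examples #7 and #12 rebuild a (3, sy, sz) mesh from a slice of the rupgeoms dataset, whose records carry lon/lat/depth fields. The following is a self-contained numpy sketch of that reshaping step; the field names and the F32 alias follow the snippet, while the data itself is made up.

import numpy

F32 = numpy.float32
point_dt = numpy.dtype([('lon', F32), ('lat', F32), ('depth', F32)])

sy, sz = 2, 3
# fake flat geometry, as it would come out of rupgeoms[gidx1:gidx2]
flat = numpy.zeros(sy * sz, point_dt)
flat['lon'] = numpy.arange(sy * sz)
flat['lat'] = 10 + numpy.arange(sy * sz)
flat['depth'] = 5.0

geom = flat.reshape(sy, sz)
mesh = numpy.zeros((3, sy, sz), F32)
mesh[0] = geom['lon']
mesh[1] = geom['lat']
mesh[2] = geom['depth']

assert mesh.shape == (3, sy, sz)
assert mesh[0, 1, 2] == 5.0  # lon of the point in row 1, column 2
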
Example #8
def _get_ruptures(dstore, events, grp_ids, rup_id):
    oq = dstore['oqparam']
    grp_trt = dstore['csm_info'].grp_trt()
    for grp_id in grp_ids:
        trt = grp_trt[grp_id]
        grp = 'grp-%02d' % grp_id
        try:
            recs = dstore['ruptures/' + grp]
        except KeyError:  # no ruptures in grp
            continue
        for rec in recs:
            if rup_id is not None and rup_id != rec['serial']:
                continue
            mesh = rec['points'].reshape(rec['sx'], rec['sy'], rec['sz'])
            rupture_cls, surface_cls, source_cls = BaseRupture.types[
                rec['code']]
            rupture = object.__new__(rupture_cls)
            rupture.surface = object.__new__(surface_cls)
            # MISSING: case complex_fault_mesh_spacing != rupture_mesh_spacing
            if 'Complex' in surface_cls.__name__:
                mesh_spacing = oq.complex_fault_mesh_spacing
            else:
                mesh_spacing = oq.rupture_mesh_spacing
            rupture.source_typology = source_cls
            rupture.mag = rec['mag']
            rupture.rake = rec['rake']
            rupture.seed = rec['seed']
            rupture.hypocenter = geo.Point(*rec['hypo'])
            rupture.occurrence_rate = rec['occurrence_rate']
            rupture.tectonic_region_type = trt
            pmfx = rec['pmfx']
            if pmfx != -1:
                rupture.pmf = dstore['pmfs/' + grp][pmfx]
            if surface_cls is geo.PlanarSurface:
                rupture.surface = geo.PlanarSurface.from_array(
                    mesh_spacing, rec['points'])
            elif surface_cls.__name__.endswith('MultiSurface'):
                rupture.surface.__init__([
                    geo.PlanarSurface.from_array(mesh_spacing, m1.flatten())
                    for m1 in mesh
                ])
            else:  # fault surface, strike and dip will be computed
                rupture.surface.strike = rupture.surface.dip = None
                m = mesh[0]
                rupture.surface.mesh = RectangularMesh(m['lon'], m['lat'],
                                                       m['depth'])
            sids = dstore['sids'][rec['sidx']]
            evs = events[rec['eidx1']:rec['eidx2']]
            ebr = EBRupture(rupture, sids, evs, grp_id, rec['serial'])
            ebr.eidx1 = rec['eidx1']
            ebr.eidx2 = rec['eidx2']
            ebr.sidx = rec['sidx']
            # not implemented: rupture_slip_direction
            yield ebr
Example #9
 def get_eid_rlz(self, monitor=None):
     """
     :returns: a composite array with the associations eid->rlz
     """
     eid_rlz = []
     for rup in self.rup_array:
         ebr = EBRupture(mock.Mock(serial=rup['serial']), rup['srcidx'],
                         self.grp_id, rup['n_occ'], self.samples)
         for rlz, eids in ebr.get_eids_by_rlz(self.rlzs_by_gsim).items():
             for eid in eids:
                 eid_rlz.append((eid, rlz))
     return numpy.array(eid_rlz, [('eid', U64), ('rlz', U16)])
Example #10
 def get_eid_rlz(self):
     """
     :returns: a composite array with the associations eid->rlz
     """
     eid_rlz = []
     for rup in self.proxies:
         ebr = EBRupture(mock.Mock(rup_id=rup['serial']), rup['source_id'],
                         self.grp_id, rup['n_occ'], self.samples)
         for rlz_id, eids in ebr.get_eids_by_rlz(self.rlzs_by_gsim).items():
             for eid in eids:
                 eid_rlz.append((eid + rup['e0'], rup['id'], rlz_id))
     return numpy.array(eid_rlz, events_dt)
Example #11
 def get_eid_rlz(self, monitor=None):
     """
     :returns: a composite array with the associations eid->rlz
     """
     eid_rlz = []
     for rup in self.rup_array:
         ebr = EBRupture(mock.Mock(serial=rup['serial']), rup['srcidx'],
                         self.grp_id, rup['n_occ'], self.samples)
         for rlz, eids in ebr.get_eids_by_rlz(self.rlzs_by_gsim).items():
             for eid in eids:
                 eid_rlz.append((eid, rlz))
     return numpy.array(eid_rlz, [('eid', U64), ('rlz', U16)])
Example #12
 def get_ruptures(self, srcfilter=calc.filters.nofilter):
     """
     :returns: a list of EBRuptures filtered by bounding box
     """
     ebrs = []
     with datastore.read(self.filename) as dstore:
         rupgeoms = dstore['rupgeoms']
         for rec in self.rup_array:
             if srcfilter.integration_distance:
                 sids = srcfilter.close_sids(rec, self.trt, rec['mag'])
                 if len(sids) == 0:  # the rupture is far away
                     continue
             else:
                 sids = None
             mesh = numpy.zeros((3, rec['sy'], rec['sz']), F32)
             geom = rupgeoms[rec['gidx1']:rec['gidx2']].reshape(
                 rec['sy'], rec['sz'])
             mesh[0] = geom['lon']
             mesh[1] = geom['lat']
             mesh[2] = geom['depth']
             rupture_cls, surface_cls = self.code2cls[rec['code']]
             rupture = object.__new__(rupture_cls)
             rupture.serial = rec['serial']
             rupture.surface = object.__new__(surface_cls)
             rupture.mag = rec['mag']
             rupture.rake = rec['rake']
             rupture.hypocenter = geo.Point(*rec['hypo'])
             rupture.occurrence_rate = rec['occurrence_rate']
             rupture.tectonic_region_type = self.trt
             if surface_cls is geo.PlanarSurface:
                 rupture.surface = geo.PlanarSurface.from_array(
                     mesh[:, 0, :])
             elif surface_cls is geo.MultiSurface:
                 # mesh has shape (3, n, 4)
                 rupture.surface.__init__([
                     geo.PlanarSurface.from_array(mesh[:, i, :])
                     for i in range(mesh.shape[1])])
             elif surface_cls is geo.GriddedSurface:
                 # fault surface, strike and dip will be computed
                 rupture.surface.strike = rupture.surface.dip = None
                 rupture.surface.mesh = Mesh(*mesh)
             else:
                 # fault surface, strike and dip will be computed
                 rupture.surface.strike = rupture.surface.dip = None
                 rupture.surface.__init__(RectangularMesh(*mesh))
             grp_id = rec['grp_id']
             ebr = EBRupture(rupture, rec['srcidx'], grp_id,
                             rec['n_occ'], self.samples)
             # not implemented: rupture_slip_direction
             ebr.sids = sids
             ebrs.append(ebr)
     return ebrs
Example #13
    def _read_scenario_ruptures(self):
        oq = self.oqparam
        gsim_lt = readinput.get_gsim_lt(self.oqparam)
        G = gsim_lt.get_num_paths()
        if oq.inputs['rupture_model'].endswith('.xml'):
            ngmfs = oq.number_of_ground_motion_fields
            self.gsims = readinput.get_gsims(oq)
            self.cmaker = ContextMaker('*', self.gsims, {
                'maximum_distance': oq.maximum_distance,
                'imtls': oq.imtls
            })
            rup = readinput.get_rupture(oq)
            mesh = surface_to_array(rup.surface).transpose(1, 2, 0).flatten()
            if self.N > oq.max_sites_disagg:  # many sites, split rupture
                ebrs = [
                    EBRupture(copyobj(rup, rup_id=rup.rup_id + i),
                              0,
                              0,
                              G,
                              e0=i * G) for i in range(ngmfs)
                ]
                meshes = numpy.array([mesh] * ngmfs, object)
            else:  # keep a single rupture with a big occupation number
                ebrs = [EBRupture(rup, 0, 0, G * ngmfs, rup.rup_id)]
                meshes = numpy.array([mesh] * ngmfs, object)
            rup_array = get_rup_array(ebrs, self.srcfilter).array
            hdf5.extend(self.datastore['rupgeoms'], meshes)
        elif oq.inputs['rupture_model'].endswith('.csv'):
            aw = readinput.get_ruptures(oq.inputs['rupture_model'])
            aw.array['n_occ'] = G
            rup_array = aw.array
            hdf5.extend(self.datastore['rupgeoms'], aw.geom)

        if len(rup_array) == 0:
            raise RuntimeError(
                'There are no sites within the maximum_distance'
                ' of %s km from the rupture' %
                oq.maximum_distance(rup.tectonic_region_type, rup.mag))

        # check the number of branchsets
        branchsets = len(gsim_lt._ltnode)
        if len(rup_array) == 1 and branchsets > 1:
            raise InvalidFile(
                '%s for a scenario calculation must contain a single '
                'branchset, found %d!' % (oq.inputs['job_ini'], branchsets))

        fake = logictree.FullLogicTree.fake(gsim_lt)
        self.realizations = fake.get_realizations()
        self.datastore['full_lt'] = fake
        self.store_rlz_info({})  # store weights
        self.save_params()
        calc.RuptureImporter(self.datastore).import_rups(rup_array)
Example #14
 def get_eid_rlz(self, proxies, rlzs_by_gsim):
     """
     :returns: a composite array with the associations eid->rlz
     """
     eid_rlz = []
     for rup in proxies:
         ebr = EBRupture(Mock(rup_id=rup['seed']), rup['source_id'],
                         rup['trt_smr'], rup['n_occ'], e0=rup['e0'])
         ebr.scenario = 'scenario' in self.oqparam.calculation_mode
         for rlz_id, eids in ebr.get_eids_by_rlz(rlzs_by_gsim).items():
             for eid in eids:
                 eid_rlz.append((eid, rup['id'], rlz_id))
     return numpy.array(eid_rlz, events_dt)
Example #15
 def get_eid_rlz(self):
     """
     :returns: a composite array with the associations eid->rlz
     """
     eid_rlz = []
     for e0, rup in zip(self.e0, self.rup_array):
         rup_id = rup['rup_id']
         ebr = EBRupture(mock.Mock(rup_id=rup_id), rup['srcidx'],
                         self.grp_id, rup['n_occ'], self.samples)
         for rlz_id, eids in ebr.get_eids_by_rlz(self.rlzs_by_gsim).items():
             for eid in eids:
                 eid_rlz.append((eid + e0, rup_id, rlz_id))
     return numpy.array(eid_rlz, events_dt)
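
Examples #10 and #15 collect (eid + e0, rup_id, rlz_id) tuples and pack them into a composite events array. A small sketch of that packing with an assumed three-field dtype (the engine's events_dt has more fields):

import numpy

# assumed layout, for illustration only
events_dt = numpy.dtype([('id', numpy.uint32), ('rup_id', numpy.uint32),
                         ('rlz_id', numpy.uint16)])

rows = []
for rup_id, e0 in [(0, 0), (1, 4)]:  # two ruptures with event offsets 0 and 4
    eid = 0  # event counter local to the rupture
    for rlz_id in (0, 1):  # two realizations per rupture
        for _ in range(2):  # two occurrences per realization
            rows.append((e0 + eid, rup_id, rlz_id))
            eid += 1

events = numpy.array(rows, events_dt)
assert events['id'].tolist() == list(range(8))
assert events['rlz_id'].tolist() == [0, 0, 1, 1, 0, 0, 1, 1]
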
Example #16
 def __iter__(self):
     self.dstore.open()  # if needed
     oq = self.dstore['oqparam']
     grp_trt = self.dstore['csm_info'].grp_trt()
     recs = self.dstore['ruptures'][self.slice]
     for rec in recs:
         if self.rup_id is not None and self.rup_id != rec['serial']:
             continue
         evs = self.dstore['events'][rec['eidx1']:rec['eidx2']]
         grp_id = evs['grp_id'][0]
         if self.grp_id is not None and self.grp_id != grp_id:
             continue
         mesh = rec['points'].reshape(rec['sx'], rec['sy'], rec['sz'])
         rupture_cls, surface_cls, source_cls = BaseRupture.types[
             rec['code']]
         rupture = object.__new__(rupture_cls)
         rupture.surface = object.__new__(surface_cls)
         # MISSING: case complex_fault_mesh_spacing != rupture_mesh_spacing
         if 'Complex' in surface_cls.__name__:
             mesh_spacing = oq.complex_fault_mesh_spacing
         else:
             mesh_spacing = oq.rupture_mesh_spacing
         rupture.source_typology = source_cls
         rupture.mag = rec['mag']
         rupture.rake = rec['rake']
         rupture.seed = rec['seed']
         rupture.hypocenter = geo.Point(*rec['hypo'])
         rupture.occurrence_rate = rec['occurrence_rate']
         rupture.tectonic_region_type = grp_trt[grp_id]
         pmfx = rec['pmfx']
         if pmfx != -1:
             rupture.pmf = self.dstore['pmfs'][pmfx]
         if surface_cls is geo.PlanarSurface:
             rupture.surface = geo.PlanarSurface.from_array(
                 mesh_spacing, rec['points'])
         elif surface_cls.__name__.endswith('MultiSurface'):
             rupture.surface.__init__([
                 geo.PlanarSurface.from_array(mesh_spacing, m1.flatten())
                 for m1 in mesh
             ])
         else:  # fault surface, strike and dip will be computed
             rupture.surface.strike = rupture.surface.dip = None
             m = mesh[0]
             rupture.surface.mesh = RectangularMesh(m['lon'], m['lat'],
                                                    m['depth'])
         ebr = EBRupture(rupture, (), evs, rec['serial'])
         ebr.eidx1 = rec['eidx1']
         ebr.eidx2 = rec['eidx2']
         # not implemented: rupture_slip_direction
         yield ebr
Example #17
 def pre_execute(self):
     """
     Read the site collection and initialize GmfComputer and seeds
     """
     oq = self.oqparam
     cinfo = logictree.FullLogicTree.fake(readinput.get_gsim_lt(oq))
     self.realizations = cinfo.get_realizations()
     self.datastore['full_lt'] = cinfo
     if 'rupture_model' not in oq.inputs:
         logging.warning(
             'There is no rupture_model, the calculator will just '
             'import data without performing any calculation')
         super().pre_execute()
         return
     self.rup = readinput.get_rupture(oq)
     self.gsims = readinput.get_gsims(oq)
     R = len(self.gsims)
     self.cmaker = ContextMaker(
         '*', self.gsims, {
             'maximum_distance': oq.maximum_distance,
             'filter_distance': oq.filter_distance
         })
     super().pre_execute()
     self.datastore['oqparam'] = oq
     self.store_rlz_info({})
     rlzs_by_gsim = cinfo.get_rlzs_by_gsim(0)
     E = oq.number_of_ground_motion_fields
     n_occ = numpy.array([E])
     ebr = EBRupture(self.rup, 0, 0, n_occ)
     ebr.e0 = 0
     events = numpy.zeros(E * R, events_dt)
     for rlz, eids in ebr.get_eids_by_rlz(rlzs_by_gsim).items():
         events[rlz * E:rlz * E + E]['id'] = eids
         events[rlz * E:rlz * E + E]['rlz_id'] = rlz
     self.datastore['events'] = self.events = events
     rupser = calc.RuptureSerializer(self.datastore)
     rup_array = get_rup_array([ebr], self.src_filter())
     if len(rup_array) == 0:
         maxdist = oq.maximum_distance(self.rup.tectonic_region_type,
                                       self.rup.mag)
         raise RuntimeError('There are no sites within the maximum_distance'
                            ' of %s km from the rupture' % maxdist)
     rupser.save(rup_array)
     rupser.close()
     self.computer = GmfComputer(ebr, self.sitecol, oq.imtls, self.cmaker,
                                 oq.truncation_level, oq.correl_model,
                                 self.amplifier)
     M32 = (numpy.float32, len(self.oqparam.imtls))
     self.sig_eps_dt = [('eid', numpy.uint64), ('sig', M32), ('eps', M32)]
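
Example #17 fills the events array in contiguous blocks of E rows per realization via events[rlz * E:rlz * E + E]. The same slicing pattern in isolation, with an assumed two-field dtype:

import numpy

events_dt = numpy.dtype([('id', numpy.uint64), ('rlz_id', numpy.uint16)])  # assumed layout

E, R = 4, 3  # ground motion fields per realization, number of realizations
events = numpy.zeros(E * R, events_dt)
for rlz in range(R):
    eids = rlz * E + numpy.arange(E, dtype=numpy.uint64)  # stand-in for get_eids_by_rlz
    events[rlz * E:rlz * E + E]['id'] = eids
    events[rlz * E:rlz * E + E]['rlz_id'] = rlz

assert events['rlz_id'].tolist() == [0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2]
assert events['id'].tolist() == list(range(E * R))
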
Example #18
def build_ruptures(sources, src_filter, param, monitor):
    """
    :param sources: a list with a single UCERF source
    :param src_filter: a SourceFilter instance
    :param param: extra parameters
    :param monitor: a Monitor instance
    :returns: an AccumDict grp_id -> EBRuptures
    """
    [src] = sources
    res = AccumDict()
    res.calc_times = []
    sampl_mon = monitor('sampling ruptures', measuremem=True)
    res.trt = DEFAULT_TRT
    background_sids = src.get_background_sids(src_filter)
    samples = getattr(src, 'samples', 1)
    n_occ = AccumDict(accum=0)
    t0 = time.time()
    with sampl_mon:
        for sam_idx in range(samples):
            for ses_idx, ses_seed in param['ses_seeds']:
                seed = sam_idx * TWO16 + ses_seed
                rups, occs = generate_event_set(src, background_sids,
                                                src_filter, ses_idx, seed)
                for rup, occ in zip(rups, occs):
                    n_occ[rup] += occ
    tot_occ = sum(n_occ.values())
    dic = {'eff_ruptures': {src.src_group_id: src.num_ruptures}}
    eb_ruptures = [
        EBRupture(rup, src.id, src.src_group_id, n, samples)
        for rup, n in n_occ.items()
    ]
    dic['rup_array'] = stochastic.get_rup_array(eb_ruptures, src_filter)
    dt = time.time() - t0
    n = len(src_filter.sitecol)
    dic['calc_times'] = {src.id: numpy.array([tot_occ, n, dt], F32)}
    return dic
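
Example #18 counts the occurrences of each sampled rupture across samples and stochastic event sets with an AccumDict(accum=0). The same counting pattern with a plain collections.Counter; the event-set generator here is a toy stand-in, not the UCERF one.

import collections

samples = 2
ses_seeds = [(1, 42), (2, 43)]  # made-up (ses_idx, ses_seed) pairs
n_occ = collections.Counter()

def generate_event_set(seed):
    # toy stand-in: every seed "produces" the same two ruptures once each
    return ['rupA', 'rupB'], [1, 1]

for sam_idx in range(samples):
    for ses_idx, ses_seed in ses_seeds:
        seed = sam_idx * 2 ** 16 + ses_seed
        rups, occs = generate_event_set(seed)
        for rup, occ in zip(rups, occs):
            n_occ[rup] += occ

tot_occ = sum(n_occ.values())
assert n_occ == {'rupA': 4, 'rupB': 4} and tot_occ == 8
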
Example #19
 def pre_execute(self):
     """
     Read the site collection and initialize GmfComputer and seeds
     """
     oq = self.oqparam
     cinfo = source.CompositionInfo.fake(readinput.get_gsim_lt(oq))
     self.datastore['csm_info'] = cinfo
     if 'rupture_model' not in oq.inputs:
          logging.warning(
              'There is no rupture_model, the calculator will just '
              'import data without performing any calculation')
         super().pre_execute()
         return
     self.rup = readinput.get_rupture(oq)
     self.gsims = readinput.get_gsims(oq)
     self.cmaker = ContextMaker(self.gsims, oq.maximum_distance,
                                oq.filter_distance)
     super().pre_execute()
     self.datastore['oqparam'] = oq
     self.rlzs_assoc = cinfo.get_rlzs_assoc()
     E = oq.number_of_ground_motion_fields
     events = numpy.zeros(E, readinput.stored_event_dt)
     events['eid'] = numpy.arange(E)
     ebr = EBRupture(self.rup, self.sitecol.sids, events)
     self.datastore['events'] = ebr.events
     rupser = calc.RuptureSerializer(self.datastore)
     rupser.save([ebr])
     rupser.close()
     self.computer = GmfComputer(ebr, self.sitecol, oq.imtls, self.cmaker,
                                 oq.truncation_level, oq.get_correl_model())
Example #20
def _build_eb_ruptures(src, num_occ_by_rup, cmaker, s_sites, rup_mon):
    # Filter the ruptures stored in the dictionary num_occ_by_rup and
    # yield pairs (rupture, <list of associated EBRuptures>).
    # NB: s_sites can be None if cmaker.maximum_distance is False, then
    # the contexts are not computed and the ruptures not filtered
    for rup in sorted(num_occ_by_rup, key=operator.attrgetter('rup_no')):
        if cmaker.maximum_distance:
            with rup_mon:
                try:
                    rup.sctx, rup.dctx = cmaker.make_contexts(s_sites, rup)
                    indices = rup.sctx.sids
                except FarAwayRupture:
                    # ignore ruptures which are far away
                    del num_occ_by_rup[rup]  # save memory
                    continue
        else:
            indices = ()

        # creating EBRuptures
        events = []
        for (sam_idx, ses_idx), num_occ in sorted(num_occ_by_rup[rup].items()):
            for _ in range(num_occ):
                # NB: the 0 below is a placeholder; the right eid will be
                # set a bit later, in set_eids
                events.append((0, src.src_group_id, ses_idx, sam_idx))
        if events:
            yield EBRupture(rup, indices, numpy.array(events, event_dt))
Example #21
def ruptures_from_erf(sources, srcfilter, param, monitor=Monitor()):
    """
    :param sources:
        a sequence of sources of the same group
    :param srcfilter:
        SourceFilter instance used also for bounding box post filtering
    :param param:
        a dictionary of additional parameters including
        ses_per_logic_tree_path
    :param monitor:
        monitor instance
    :yields:
        dictionaries with keys rup_array, calc_times
    """
    # the yielded AccumDicts have keys rup_array, calc_times, eff_ruptures, source_ids

    trt = sources[0].tectonic_region_type

    eb_ruptures = []
    eff_ruptures = 0

    # AccumDict of arrays with 3 elements num_ruptures, num_sites, calc_time
    calc_times = AccumDict(accum=numpy.zeros(3, numpy.float32))

    for src, _sites in srcfilter(sources):
        nr = src.num_ruptures
        eff_ruptures += nr
        t0 = time.time()
        if len(eb_ruptures) > MAX_RUPTURES:
            # yield partial result to avoid running out of memory

            source_ids = get_source_ids(eb_ruptures, srcfilter)

            yield AccumDict(
                dict(rup_array=get_rup_array(eb_ruptures, srcfilter),
                     calc_times={},
                     eff_ruptures={},
                     source_ids=source_ids))
            eb_ruptures.clear()

        for rup, grp_id in src.ruptures_from_erf():
            eb_ruptures.append(EBRupture(rup, src.id, grp_id, 1, 1))

        dt = time.time() - t0
        try:
            n_sites = len(_sites)
        except (TypeError, ValueError):  # for None or a closed dataset
            n_sites = 0
        calc_times[src.source_id] += numpy.array([nr, n_sites, dt])

    rup_array = get_rup_array(eb_ruptures, srcfilter)
    source_ids = get_source_ids(eb_ruptures, srcfilter)

    yield AccumDict(
        dict(rup_array=rup_array,
             calc_times=calc_times,
             eff_ruptures={trt: eff_ruptures},
             source_ids=source_ids))
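
Examples #21, #25, #26 and #29 accumulate one [num_ruptures, num_sites, calc_time] triple per source in an AccumDict. The same accumulation with a plain defaultdict; the timing values are fake.

import collections
import numpy

calc_times = collections.defaultdict(lambda: numpy.zeros(3, numpy.float32))

# fake (source_id, num_ruptures, num_sites, elapsed) records
records = [('src1', 10, 5, 0.25), ('src2', 3, 8, 0.5), ('src1', 4, 5, 0.5)]
for source_id, nr, n_sites, dt in records:
    calc_times[source_id] += numpy.array([nr, n_sites, dt])

assert calc_times['src1'].tolist() == [14.0, 10.0, 0.75]
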
Example #22
def get_rupture_sitecol(oqparam, sitecol):
    """
    Read the `rupture_model` file and filter the site collection

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param sitecol:
        a :class:`openquake.hazardlib.site.SiteCollection` instance
    :returns:
        a pair (EBRupture, FilteredSiteCollection)
    """
    rup_model = oqparam.inputs['rupture_model']
    [rup_node] = nrml.read(rup_model)
    conv = sourceconverter.RuptureConverter(
        oqparam.rupture_mesh_spacing, oqparam.complex_fault_mesh_spacing)
    rup = conv.convert_node(rup_node)
    rup.tectonic_region_type = '*'  # there is no TRT for scenario ruptures
    rup.seed = oqparam.random_seed
    maxdist = oqparam.maximum_distance['default']
    sc = calc.filters.filter_sites_by_distance_to_rupture(
        rup, maxdist, sitecol)
    if sc is None:
        raise RuntimeError(
            'All sites were filtered out! maximum_distance=%s km' %
            maxdist)
    n = oqparam.number_of_ground_motion_fields
    events = numpy.zeros(n, stored_event_dt)
    events['eid'] = numpy.arange(n)
    ebr = EBRupture(rup, sc.sids, events)
    return ebr, sc
Example #23
def _build_eb_ruptures(src, num_occ_by_rup, cmaker, s_sites, random_seed,
                       rup_mon):
    """
    Filter the ruptures stored in the dictionary num_occ_by_rup and
    yield pairs (rupture, <list of associated EBRuptures>)
    """
    for rup in sorted(num_occ_by_rup, key=operator.attrgetter('rup_no')):
        rup.serial = rup.seed - random_seed + 1
        with rup_mon:
            try:
                rup.ctx = cmaker.make_contexts(s_sites, rup)
                indices = rup.ctx[0].sids
            except FarAwayRupture:
                # ignore ruptures which are far away
                del num_occ_by_rup[rup]  # save memory
                continue

        # creating EBRuptures
        events = []
        for (sam_idx, ses_idx), num_occ in sorted(num_occ_by_rup[rup].items()):
            for _ in range(num_occ):
                # NB: the 0 below is a placeholder; the right eid will be
                # set a bit later, in set_eids
                events.append((0, src.src_group_id, ses_idx, sam_idx))
        if events:
            yield EBRupture(rup, indices, numpy.array(events, event_dt))
Example #24
def build_eb_ruptures(src, num_ses, cmaker, s_sites, rup_n_occ=()):
    """
    :param src: a source object
    :param num_ses: number of stochastic event sets
    :param cmaker: a ContextMaker instance
    :param s_sites: a (filtered) site collection
    :param rup_n_occ: (rup, n_occ) pairs [inferred from the source]
    :returns: a list of EBRuptures
    """
    # NB: s_sites can be None if cmaker.maximum_distance is False, then
    # the contexts are not computed and the ruptures not filtered
    ebrs = []
    samples = getattr(src, 'samples', 1)
    if rup_n_occ == ():
        # NB: the number of occurrences is very low, << 1, so it is
        # more efficient to filter only the ruptures that occur, i.e.
        # to call sample_ruptures *before* the filtering
        rup_n_occ = src.sample_ruptures(samples, num_ses, cmaker.ir_mon)
    for rup, n_occ in rup_n_occ:
        if cmaker.maximum_distance:
            with cmaker.ctx_mon:
                try:
                    rup.sctx, rup.dctx = cmaker.make_contexts(s_sites, rup)
                    indices = rup.sctx.sids
                except FarAwayRupture:
                    continue
        else:
            indices = ()

        ebr = EBRupture(rup, src.id, src.src_group_id, indices, n_occ, samples)
        ebrs.append(ebr)

    return ebrs
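
The NB comment in Example #24 argues that, because the expected number of occurrences per rupture is much smaller than one, it is cheaper to sample first and filter only the ruptures that actually occur. A toy numpy illustration of that ordering, with Poisson sampling and a fake distance filter (none of this is engine code):

import numpy

rng = numpy.random.default_rng(42)
num_rups = 10000
rates = numpy.full(num_rups, 1e-3)  # mean occurrences per rupture << 1
distances = rng.uniform(0, 600, num_rups)  # fake rupture-site distances in km
max_dist = 300

# sample first: only the few ruptures with n_occ > 0 need filtering
n_occ = rng.poisson(rates)
occurring = numpy.flatnonzero(n_occ)
kept = occurring[distances[occurring] <= max_dist]

# far fewer distance checks than filtering all ruptures up front
assert len(occurring) < num_rups // 100
print(len(occurring), 'ruptures sampled,', len(kept), 'kept after filtering')
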
Example #25
def sample_ruptures(sources, srcfilter, param, monitor=Monitor()):
    """
    :param sources:
        a sequence of sources of the same group
    :param srcfilter:
        SourceFilter instance used also for bounding box post filtering
    :param param:
        a dictionary of additional parameters including
        ses_per_logic_tree_path
    :param monitor:
        monitor instance
    :yields:
        dictionaries with keys rup_array, calc_times
    """
    # AccumDict of arrays with 3 elements num_ruptures, num_sites, calc_time
    calc_times = AccumDict(accum=numpy.zeros(3, numpy.float32))
    # Compute and save stochastic event sets
    num_ses = param['ses_per_logic_tree_path']
    trt = sources[0].tectonic_region_type
    # Compute the number of occurrences of the source group. This is used
    # for cluster groups or groups with mutually exclusive sources.
    if (getattr(sources, 'atomic', False)
            and getattr(sources, 'cluster', False)):
        eb_ruptures, calc_times = sample_cluster(sources, srcfilter, num_ses,
                                                 param)

        # Yield ruptures
        yield AccumDict(
            dict(rup_array=get_rup_array(eb_ruptures, srcfilter),
                 calc_times=calc_times,
                 eff_ruptures={trt: len(eb_ruptures)}))
    else:
        eb_ruptures = []
        eff_ruptures = 0
        # AccumDict of arrays with 3 elements num_ruptures, num_sites, calc_time
        calc_times = AccumDict(accum=numpy.zeros(3, numpy.float32))
        for src, _ in srcfilter.filter(sources):
            nr = src.num_ruptures
            eff_ruptures += nr
            t0 = time.time()
            if len(eb_ruptures) > MAX_RUPTURES:
                # yield partial result to avoid running out of memory
                yield AccumDict(
                    dict(rup_array=get_rup_array(eb_ruptures, srcfilter),
                         calc_times={},
                         eff_ruptures={}))
                eb_ruptures.clear()
            samples = getattr(src, 'samples', 1)
            for rup, trt_smrlz, n_occ in src.sample_ruptures(
                    samples * num_ses, param['ses_seed']):
                ebr = EBRupture(rup, src.source_id, trt_smrlz, n_occ)
                eb_ruptures.append(ebr)
            dt = time.time() - t0
            calc_times[src.id] += numpy.array([nr, src.nsites, dt])
        rup_array = get_rup_array(eb_ruptures, srcfilter)
        yield AccumDict(
            dict(rup_array=rup_array,
                 calc_times=calc_times,
                 eff_ruptures={trt: eff_ruptures}))
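
Examples #25, #26 and #29 flush the accumulated ruptures whenever the list grows past MAX_RUPTURES, yielding a partial result to bound memory. A generator sketch of that batching pattern with plain lists; MAX_RUPTURES here is an arbitrary small value.

MAX_RUPTURES = 3  # small value for illustration only

def batched_ruptures(sources):
    """Accumulate ruptures per source, flushing once the list passes MAX_RUPTURES."""
    acc = []
    for src_ruptures in sources:
        if len(acc) > MAX_RUPTURES:  # flush before growing further
            yield list(acc)
            acc.clear()
        acc.extend(src_ruptures)
    yield list(acc)  # final, possibly smaller, batch

sources = [['r%d_%d' % (s, i) for i in range(2)] for s in range(4)]
batches = list(batched_ruptures(sources))
assert sum(len(b) for b in batches) == 8
assert all(batches)  # no empty batches in this example
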
Example #26
def sample_ruptures(sources, srcfilter, param, monitor=Monitor()):
    """
    :param sources:
        a sequence of sources of the same group
    :param srcfilter:
        SourceFilter instance used also for bounding box post filtering
    :param param:
        a dictionary of additional parameters including
        ses_per_logic_tree_path
    :param monitor:
        monitor instance
    :yields:
        dictionaries with keys rup_array, calc_times
    """
    # AccumDict of arrays with 3 elements num_ruptures, num_sites, calc_time
    calc_times = AccumDict(accum=numpy.zeros(3, numpy.float32))
    # Compute and save stochastic event sets
    num_ses = param['ses_per_logic_tree_path']
    [grp_id] = set(src.src_group_id for src in sources)
    # Compute the number of occurrences of the source group. This is used
    # for cluster groups or groups with mutually exclusive sources.
    if (getattr(sources, 'atomic', False) and
            getattr(sources, 'cluster', False)):
        eb_ruptures, calc_times = sample_cluster(
            sources, srcfilter, num_ses, param)

        # Yield ruptures
        yield AccumDict(rup_array=get_rup_array(eb_ruptures, srcfilter),
                        calc_times=calc_times,
                        eff_ruptures={grp_id: len(eb_ruptures)})
    else:
        eb_ruptures = []
        eff_ruptures = 0
        # AccumDict of arrays with 3 elements num_occurrences, num_sites, calc_time
        calc_times = AccumDict(accum=numpy.zeros(3, numpy.float32))
        for src, _sites in srcfilter(sources):
            eff_ruptures += 1
            t0 = time.time()
            if len(eb_ruptures) > MAX_RUPTURES:
                # yield partial result to avoid running out of memory
                yield AccumDict(rup_array=get_rup_array(eb_ruptures,
                                                        srcfilter),
                                calc_times={}, eff_ruptures={})
                eb_ruptures.clear()
            samples = getattr(src, 'samples', 1)
            tot_occ = 0
            for rup, n_occ in src.sample_ruptures(samples * num_ses):
                ebr = EBRupture(rup, src.id, grp_id, n_occ, samples)
                eb_ruptures.append(ebr)
                tot_occ += n_occ
            dt = time.time() - t0
            try:
                n_sites = len(_sites)
            except (TypeError, ValueError):  # for None or a closed dataset
                n_sites = 0
            calc_times[src.id] += numpy.array([tot_occ, n_sites, dt])
        rup_array = get_rup_array(eb_ruptures, srcfilter)
        yield AccumDict(rup_array=rup_array, calc_times=calc_times,
                        eff_ruptures={grp_id: eff_ruptures})
Example #27
def compute_ruptures(sources, src_filter, gsims, param, monitor):
    """
    :param sources: a list with a single UCERF source
    :param src_filter: a SourceFilter instance
    :param gsims: a list of GSIMs
    :param param: extra parameters
    :param monitor: a Monitor instance
    :returns: an AccumDict grp_id -> EBRuptures
    """
    [src] = sources
    res = AccumDict()
    res.calc_times = AccumDict()
    serial = 1
    sampl_mon = monitor('sampling ruptures', measuremem=True)
    filt_mon = monitor('filtering ruptures', measuremem=False)
    res.trt = DEFAULT_TRT
    t0 = time.time()
    ebruptures = []
    background_sids = src.get_background_sids(src_filter)
    sitecol = src_filter.sitecol
    idist = src_filter.integration_distance
    for sample in range(param['samples']):
        for ses_idx, ses_seed in param['ses_seeds']:
            seed = sample * event_based.TWO16 + ses_seed
            with sampl_mon:
                rups, n_occs = src.generate_event_set(background_sids,
                                                      src_filter, seed)
            with filt_mon:
                for rup, n_occ in zip(rups, n_occs):
                    rup.seed = seed
                    try:
                        r_sites, rrup = idist.get_closest(sitecol, rup)
                    except FarAwayRupture:
                        continue
                    indices = (numpy.arange(len(r_sites))
                               if r_sites.indices is None else r_sites.indices)
                    events = []
                    for _ in range(n_occ):
                        events.append((0, src.src_group_id, ses_idx, sample))
                    if events:
                        evs = numpy.array(events, calc.event_dt)
                        ebruptures.append(EBRupture(rup, indices, evs, serial))
                        serial += 1
    res.num_events = event_based.set_eids(ebruptures)
    res[src.src_group_id] = ebruptures
    res.calc_times[src.src_group_id] = {
        src.source_id:
        numpy.array([src.weight, len(sitecol),
                     time.time() - t0, 1])
    }
    if not param['save_ruptures']:
        res.events_by_grp = {
            grp_id: event_based.get_events(res[grp_id])
            for grp_id in res
        }
    res.eff_ruptures = {src.src_group_id: src.num_ruptures}
    return res
Example #28
def compute_hazard(sources, src_filter, rlzs_by_gsim, param, monitor):
    """
    :param sources: a list with a single UCERF source
    :param src_filter: a SourceFilter instance
    :param rlzs_by_gsim: a dictionary gsim -> rlzs
    :param param: extra parameters
    :param monitor: a Monitor instance
    :returns: an AccumDict grp_id -> EBRuptures
    """
    [src] = sources
    res = AccumDict()
    res.calc_times = []
    serial = 1
    sampl_mon = monitor('sampling ruptures', measuremem=True)
    filt_mon = monitor('filtering ruptures', measuremem=False)
    res.trt = DEFAULT_TRT
    ebruptures = []
    background_sids = src.get_background_sids(src_filter)
    sitecol = src_filter.sitecol
    cmaker = ContextMaker(rlzs_by_gsim, src_filter.integration_distance)
    for sample in range(param['samples']):
        for ses_idx, ses_seed in param['ses_seeds']:
            seed = sample * TWO16 + ses_seed
            with sampl_mon:
                rups, n_occs = generate_event_set(src, background_sids,
                                                  src_filter, seed)
            with filt_mon:
                for rup, n_occ in zip(rups, n_occs):
                    rup.serial = serial
                    try:
                        rup.sctx, rup.dctx = cmaker.make_contexts(sitecol, rup)
                        indices = rup.sctx.sids
                    except FarAwayRupture:
                        continue
                    events = []
                    for _ in range(n_occ):
                        events.append((0, src.src_group_id, ses_idx, sample))
                    if events:
                        evs = numpy.array(events, stochastic.event_dt)
                        ebruptures.append(EBRupture(rup, src.id, indices, evs))
                        serial += 1
    res.num_events = len(stochastic.set_eids(ebruptures))
    res['ruptures'] = {src.src_group_id: ebruptures}
    if param['save_ruptures']:
        res.ruptures_by_grp = {src.src_group_id: ebruptures}
    else:
        res.events_by_grp = {
            src.src_group_id: event_based.get_events(ebruptures)
        }
    res.eff_ruptures = {src.src_group_id: src.num_ruptures}
    if param.get('gmf'):
        getter = getters.GmfGetter(rlzs_by_gsim, ebruptures, sitecol,
                                   param['oqparam'], param['min_iml'],
                                   param['samples'])
        res.update(getter.compute_gmfs_curves(monitor))
    return res
Example #29
def sample_ruptures(sources, cmaker, sitecol=None, monitor=Monitor()):
    """
    :param sources:
        a sequence of sources of the same group
    :param cmaker:
        a ContextMaker instance with ses_per_logic_tree_path, ses_seed
    :param sitecol:
        SiteCollection instance used for filtering (None for no filtering)
    :param monitor:
        monitor instance
    :yields:
        dictionaries with keys rup_array, calc_times
    """
    srcfilter = SourceFilter(sitecol, cmaker.maximum_distance)
    # AccumDict of arrays with 3 elements num_ruptures, num_sites, calc_time
    calc_times = AccumDict(accum=numpy.zeros(3, numpy.float32))
    # Compute and save stochastic event sets
    num_ses = cmaker.ses_per_logic_tree_path
    grp_id = sources[0].grp_id
    # Compute the number of occurrences of the source group. This is used
    # for cluster groups or groups with mutually exclusive sources.
    if (getattr(sources, 'atomic', False) and
            getattr(sources, 'cluster', False)):
        eb_ruptures, calc_times = sample_cluster(
            sources, srcfilter, num_ses, vars(cmaker))

        # Yield ruptures
        er = sum(src.num_ruptures for src, _ in srcfilter.filter(sources))
        yield AccumDict(dict(rup_array=get_rup_array(eb_ruptures, srcfilter),
                             calc_times=calc_times, eff_ruptures={grp_id: er}))
    else:
        eb_ruptures = []
        eff_ruptures = 0
        # AccumDict of arrays with 3 elements num_ruptures, num_sites, calc_time
        calc_times = AccumDict(accum=numpy.zeros(3, numpy.float32))
        for src, _ in srcfilter.filter(sources):
            nr = src.num_ruptures
            eff_ruptures += nr
            t0 = time.time()
            if len(eb_ruptures) > MAX_RUPTURES:
                # yield partial result to avoid running out of memory
                yield AccumDict(dict(rup_array=get_rup_array(eb_ruptures,
                                                             srcfilter),
                                     calc_times={}, eff_ruptures={}))
                eb_ruptures.clear()
            samples = getattr(src, 'samples', 1)
            for rup, trt_smr, n_occ in src.sample_ruptures(
                    samples * num_ses, cmaker.ses_seed):
                ebr = EBRupture(rup, src.source_id, trt_smr, n_occ)
                eb_ruptures.append(ebr)
            dt = time.time() - t0
            calc_times[src.id] += numpy.array([nr, src.nsites, dt])
        rup_array = get_rup_array(eb_ruptures, srcfilter)
        yield AccumDict(dict(rup_array=rup_array, calc_times=calc_times,
                             eff_ruptures={grp_id: eff_ruptures}))
Example #30
    def _read_scenario_ruptures(self):
        oq = self.oqparam
        if oq.inputs['rupture_model'].endswith(('.xml', '.toml', '.txt')):
            self.gsims = readinput.get_gsims(oq)
            self.cmaker = ContextMaker(
                '*', self.gsims,
                {'maximum_distance': oq.maximum_distance,
                 'filter_distance': oq.filter_distance})
            n_occ = numpy.array([oq.number_of_ground_motion_fields])
            rup = readinput.get_rupture(oq)
            ebr = EBRupture(rup, 0, 0, n_occ)
            ebr.e0 = 0
            rup_array = get_rup_array([ebr], self.srcfilter).array
            mesh = surface_to_array(rup.surface).transpose(1, 2, 0).flatten()
            hdf5.extend(self.datastore['rupgeoms'],
                        numpy.array([mesh], object))
        elif oq.inputs['rupture_model'].endswith('.csv'):
            aw = readinput.get_ruptures(oq.inputs['rupture_model'])
            rup_array = aw.array
            hdf5.extend(self.datastore['rupgeoms'], aw.geom)

        if len(rup_array) == 0:
            raise RuntimeError(
                'There are no sites within the maximum_distance'
                ' of %s km from the rupture' % oq.maximum_distance(
                    rup.tectonic_region_type, rup.mag))

        gsim_lt = readinput.get_gsim_lt(self.oqparam)
        # check the number of branchsets
        branchsets = len(gsim_lt._ltnode)
        if len(rup_array) == 1 and branchsets > 1:
            raise InvalidFile(
                '%s for a scenario calculation must contain a single '
                'branchset, found %d!' % (oq.inputs['job_ini'], branchsets))

        fake = logictree.FullLogicTree.fake(gsim_lt)
        self.realizations = fake.get_realizations()
        self.datastore['full_lt'] = fake
        self.store_rlz_info({})  # store weights
        self.save_params()
        calc.RuptureImporter(self.datastore).import_rups(rup_array)
Example #31
def compute_ruptures(sources, src_filter, gsims, param, monitor):
    """
    :param sources: a list with a single UCERF source
    :param src_filter: a SourceFilter instance
    :param gsims: a list of GSIMs
    :param param: extra parameters
    :param monitor: a Monitor instance
    :returns: an AccumDict grp_id -> EBRuptures
    """
    [src] = sources
    res = AccumDict()
    res.calc_times = []
    serial = 1
    sampl_mon = monitor('sampling ruptures', measuremem=True)
    filt_mon = monitor('filtering ruptures', measuremem=False)
    res.trt = DEFAULT_TRT
    ebruptures = []
    background_sids = src.get_background_sids(src_filter)
    sitecol = src_filter.sitecol
    cmaker = ContextMaker(gsims, src_filter.integration_distance)
    for sample in range(param['samples']):
        for ses_idx, ses_seed in param['ses_seeds']:
            seed = sample * TWO16 + ses_seed
            with sampl_mon:
                rups, n_occs = generate_event_set(src, background_sids,
                                                  src_filter, seed)
            with filt_mon:
                for rup, n_occ in zip(rups, n_occs):
                    rup.serial = serial
                    rup.seed = seed
                    try:
                        rup.sctx, rup.dctx = cmaker.make_contexts(sitecol, rup)
                        indices = rup.sctx.sids
                    except FarAwayRupture:
                        continue
                    events = []
                    for _ in range(n_occ):
                        events.append((0, src.src_group_id, ses_idx, sample))
                    if events:
                        evs = numpy.array(events, stochastic.event_dt)
                        ebruptures.append(EBRupture(rup, indices, evs))
                        serial += 1
    res.num_events = len(stochastic.set_eids(ebruptures))
    res[src.src_group_id] = ebruptures
    if not param['save_ruptures']:
        res.events_by_grp = {
            grp_id: event_based.get_events(res[grp_id])
            for grp_id in res
        }
    res.eff_ruptures = {src.src_group_id: src.num_ruptures}
    return res
Example #32
def sample_ebruptures(src_groups, cmakerdict):
    """
    Sample independent sources without filtering.

    :param src_groups: a list of source groups
    :param cmakerdict: a dictionary TRT -> cmaker
    :returns: a list of EBRuptures
    """
    ebrs = []
    e0 = 0
    ordinal = 0
    for sg in src_groups:
        cmaker = cmakerdict[sg.trt]
        for src in sg:
            samples = getattr(src, 'samples', 1)
            for rup, trt_smr, n_occ in src.sample_ruptures(
                    samples * cmaker.ses_per_logic_tree_path, cmaker.ses_seed):
                ebr = EBRupture(rup, src.source_id, trt_smr, n_occ, e0=e0)
                ebr.ordinal = ordinal
                ebrs.append(ebr)
                e0 += n_occ
                ordinal += 1
    return ebrs
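
Example #32 gives each EBRupture a cumulative event offset e0 equal to the total number of occurrences of all previously sampled ruptures. A sketch of that bookkeeping with plain tuples:

def assign_offsets(n_occs):
    """Return (ordinal, e0, n_occ) triples with cumulative event offsets."""
    out, e0 = [], 0
    for ordinal, n_occ in enumerate(n_occs):
        out.append((ordinal, e0, n_occ))
        e0 += n_occ
    return out

triples = assign_offsets([3, 1, 4])
assert triples == [(0, 0, 3), (1, 3, 1), (2, 4, 4)]
# the final offset plus the final n_occ gives the total number of events
assert triples[-1][1] + triples[-1][2] == 8
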