def safely_call(func, args, pickle=False):
    """
    Call the given function with the given arguments safely, i.e.
    by trapping the exceptions. Return a triple
    (result, exc_type, monitor) where exc_type is None if no
    exceptions occur, otherwise it is the exception class and the
    result is a string containing error message and traceback.

    :param func: the function to call
    :param args: the arguments
    :param pickle: if set, the input arguments are unpickled and
        the return value is pickled; otherwise they are left unchanged
    """
    if pickle:
        args = [a.unpickle() for a in args]
    # the last argument is taken as the monitor, if it is one
    mon = args and isinstance(args[-1], PerformanceMonitor)
    try:
        res = func(*args), None, args[-1] if mon else DummyMonitor()
    except Exception:
        # NB: catch Exception, not everything, so that
        # KeyboardInterrupt/SystemExit can still stop the process
        etype, exc, tb = sys.exc_info()
        tb_str = ''.join(traceback.format_tb(tb))
        res = ('\n%s%s: %s' % (tb_str, etype.__name__, exc),
               etype, args[-1] if mon else DummyMonitor())
    if pickle:
        return Pickled(res)
    return res
def __init__(self, oqparam, monitor=None, calc_id=None, persistent=True):
    """
    Initialize the calculator.

    :param oqparam: an object with the job parameters (read here:
        its ``export_dir`` attribute)
    :param monitor: a monitor instance; if None, a fresh
        DummyMonitor is created (avoids sharing a single default
        instance across all calculators)
    :param calc_id: numeric calculation ID, or None for a new one
    :param persistent: if True use a persistent DataStore, otherwise
        an in-memory AccumDict emulating its interface
    """
    self.monitor = DummyMonitor() if monitor is None else monitor
    if persistent:
        self.datastore = datastore.DataStore(calc_id)
    else:
        # emulate the DataStore interface in memory
        self.datastore = general.AccumDict()
        self.datastore.hdf5 = {}
        self.datastore.attrs = {}
    self.datastore.export_dir = oqparam.export_dir
    self.oqparam = oqparam
    self.persistent = persistent
def __init__(self, oqparam, monitor=None, calc_id=None, persistent=True):
    """
    Initialize the calculator.

    :param oqparam: an object with the job parameters (read here:
        its ``export_dir`` attribute)
    :param monitor: a monitor instance; if None, a fresh
        DummyMonitor is created (avoids sharing a single default
        instance across all calculators)
    :param calc_id: numeric calculation ID, or None for a new one
    :param persistent: if True use a persistent DataStore, otherwise
        an in-memory AccumDict emulating its interface
    """
    self.monitor = DummyMonitor() if monitor is None else monitor
    if persistent:
        self.datastore = datastore.DataStore(calc_id)
    else:
        # emulate the DataStore interface in memory
        self.datastore = general.AccumDict()
        self.datastore.hdf5 = {}
    self.datastore.export_dir = oqparam.export_dir
    if 'oqparam' not in self.datastore:  # new datastore
        self.oqparam = oqparam
    # else we are doing a precalculation; oqparam has been already stored
    self.persistent = persistent
def get_composite_source_model(oqparam, sitecol=None,
                               SourceProcessor=source.SourceFilterSplitter,
                               monitor=None,
                               no_distribute=parallel.no_distribute()):
    """
    Build the source models by splitting the sources. If prefiltering is
    enabled, also reduce the GSIM logic trees in the underlying source
    models.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param sitecol:
        a :class:`openquake.hazardlib.site.SiteCollection` instance
    :param SourceProcessor:
        the class used to process the sources
    :param monitor:
        a monitor instance; if None, a fresh DummyMonitor is created
        (avoids sharing a single default instance across calls)
    :param no_distribute:
        used to disable parallel splitting of the sources
        NOTE(review): the default is evaluated once at import time;
        confirm this is intended before changing it
    :returns:
        an iterator over :class:`openquake.commonlib.source.SourceModel`
    """
    monitor = DummyMonitor() if monitor is None else monitor
    processor = SourceProcessor(sitecol, oqparam.maximum_distance,
                                oqparam.area_source_discretization)
    source_model_lt = get_source_model_lt(oqparam)
    smodels = []
    trt_id = 0
    for source_model in get_source_models(
            oqparam, source_model_lt, processor.sitecol,
            in_memory=hasattr(processor, 'process')):
        # assign sequential IDs to the tectonic region type models
        for trt_model in source_model.trt_models:
            trt_model.id = trt_id
            trt_id += 1
        smodels.append(source_model)
    csm = source.CompositeSourceModel(source_model_lt, smodels)
    if sitecol is not None and hasattr(processor, 'process'):
        seqtime, partime = processor.process(csm, no_distribute)
        monitor.write(
            ['fast sources filtering/splitting', str(seqtime), '0'])
        monitor.write(
            ['slow sources filtering/splitting', str(partime), '0'])
        if not csm.get_sources():
            raise RuntimeError('All sources were filtered away')
    csm.count_ruptures()
    return csm
def compute_expand_gmfs(self):
    """
    :returns: an array R x N where N is the number of sites and
              R is the number of ruptures.
    """
    from openquake.commonlib.calculators.event_based import make_gmfs
    gmfs = make_gmfs(
        self.ses_ruptures, self.sitecol, self.imts, self.gsims,
        self.trunc_level, self.correl_model, DummyMonitor())
    gmf_dt = gsim_imt_dt(self.gsims, self.imts)
    num_sites = len(self.sitecol.complete)
    gmfa = numpy.zeros((len(gmfs), num_sites), gmf_dt)
    for row, (sesrup, gmf) in enumerate(zip(self.ses_ruptures, gmfs)):
        # expand the GMF computed on the filtered sites to the
        # complete site collection, zero-filled elsewhere
        expanded = numpy.zeros(num_sites, gmf_dt)
        expanded[sesrup.indices] = gmf
        gmfa[row] = expanded
    return gmfa  # array R x N
def __init__(self, oqparam, monitor=None, calc_id=None):
    """
    Initialize the calculator with a persistent DataStore.

    :param oqparam: an object with the job parameters (read here:
        its ``export_dir`` attribute)
    :param monitor: a monitor instance; if None, a fresh
        DummyMonitor is created (avoids sharing a single default
        instance across all calculators)
    :param calc_id: numeric calculation ID, or None for a new one
    """
    self.monitor = DummyMonitor() if monitor is None else monitor
    self.datastore = datastore.DataStore(calc_id)
    # let the monitor write its data inside the datastore file
    self.monitor.hdf5path = self.datastore.hdf5path
    self.datastore.export_dir = oqparam.export_dir
    self.oqparam = oqparam
def hazard_curves_per_trt(
        sources, sites, imtls, gsims, truncation_level=None,
        source_site_filter=filters.source_site_noop_filter,
        rupture_site_filter=filters.rupture_site_noop_filter,
        monitor=None):
    """
    Compute the hazard curves for a set of sources belonging to the same
    tectonic region type for all the GSIMs associated to that TRT.
    The arguments are the same as in :func:`calc_hazard_curves`, except
    for ``gsims``, which is a list of GSIM instances, and ``monitor``,
    which defaults to a fresh DummyMonitor when None is passed (avoids
    sharing a single default instance across calls).

    :returns:
        A list of G arrays of size N, where N is the number of sites and
        G the number of gsims. Each array contains records with fields
        given by the intensity measure types; the size of each field is
        given by the number of levels in ``imtls``.
    """
    monitor = DummyMonitor() if monitor is None else monitor
    gnames = list(map(str, gsims))
    imt_dt = numpy.dtype([(imt, float, len(imtls[imt]))
                          for imt in sorted(imtls)])
    imts = {from_string(imt): imls for imt, imls in imtls.items()}
    # start from probability-of-no-exceedance 1 and multiply in the
    # contribution of each rupture; invert at the end
    curves = [numpy.ones(len(sites), imt_dt) for gname in gnames]
    sources_sites = ((source, sites) for source in sources)
    ctx_mon = monitor('making contexts', measuremem=False)
    rup_mon = monitor('getting ruptures', measuremem=False)
    pne_mon = monitor('computing poes', measuremem=False)
    monitor.calc_times = []  # pairs (src_id, delta_t)
    for source, s_sites in source_site_filter(sources_sites):
        t0 = time.time()
        try:
            with rup_mon:
                rupture_sites = list(
                    rupture_site_filter(
                        (rupture, s_sites)
                        for rupture in source.iter_ruptures()))
            for rupture, r_sites in rupture_sites:
                for i, gsim in enumerate(gsims):
                    with ctx_mon:
                        sctx, rctx, dctx = gsim.make_contexts(
                            r_sites, rupture)
                    with pne_mon:
                        for imt in imts:
                            poes = gsim.get_poes(
                                sctx, rctx, dctx, imt, imts[imt],
                                truncation_level)
                            pno = rupture.get_probability_no_exceedance(
                                poes)
                            expanded_pno = r_sites.expand(
                                pno, placeholder=1)
                            curves[i][str(imt)] *= expanded_pno
        except Exception as err:
            # re-raise with the offending source ID attached, keeping
            # the original exception type and traceback
            etype, err, tb = sys.exc_info()
            msg = 'An error occurred with source id=%s. Error: %s'
            msg %= (source.source_id, str(err))
            raise_(etype, msg, tb)
        # we are attaching the calculation times to the monitor
        # so that oq-lite (and the engine) can store them
        monitor.calc_times.append((source.id, time.time() - t0))
        # NB: source.id is an integer; it should not be confused
        # with source.source_id, which is a string
    for i in range(len(gnames)):
        for imt in imtls:
            curves[i][imt] = 1. - curves[i][imt]
    return curves