def make_figure_dist_by_mag(extractors, what):
    """
    $ oq plot "dist_by_mag?"
    """
    # NB: matplotlib is imported inside since it is a costly import
    import matplotlib.pyplot as plt
    [ex] = extractors
    effect = ex.get('effect')
    mags = ['%.2f' % mag for mag in effect.mags]
    fig, ax = plt.subplots()
    trti = 0
    for trt, dists in effect.dist_bins.items():
        dic = dict(zip(mags, effect[:, :, trti]))
        if ex.oqparam.pointsource_distance:
            pdist = getdefault(ex.oqparam.pointsource_distance, trt)
        else:
            pdist = None
        eff = Effect(dic, dists, pdist)
        dist_by_mag = eff.dist_by_mag()
        ax.plot(effect.mags, list(dist_by_mag.values()), label=trt,
                color='red')
        if pdist:
            dist_by_mag = eff.dist_by_mag(eff.collapse_value)
            ax.plot(effect.mags, list(dist_by_mag.values()), label=trt,
                    color='green')
        ax.set_xlabel('Mag')
        ax.set_ylabel('Dist')
        ax.set_title('Integration Distance at intensity=%s' % eff.zero_value)
        trti += 1
    ax.legend()
    return plt
def test_dist_by_mag(self):
    effect = Effect(intensities, dists)
    dist = list(effect.dist_by_mag(0).values())
    numpy.testing.assert_allclose(dist, [50, 50, 50, 50])
    dist = list(effect.dist_by_mag(.9).values())
    numpy.testing.assert_allclose(dist, [12, 15, 19.677419, 20])
    dist = list(effect.dist_by_mag(1.1).values())
    numpy.testing.assert_allclose(dist, [0, 10, 13.225806, 16.666667])
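# The following is a minimal, self-contained sketch -- NOT the engine's
# Effect class -- of the behaviour the test above exercises: given one
# intensity curve per magnitude over a set of distance bins,
# dist_by_mag(intensity) returns, for each magnitude, the interpolated
# distance at which the curve decays to the given intensity. The toy data
# in the __main__ block is hypothetical and unrelated to the real fixtures.
import numpy


class EffectSketch(object):
    def __init__(self, intensities_by_mag, dists):
        self.intensities_by_mag = intensities_by_mag  # mag -> curve over dists
        self.dists = numpy.asarray(dists)

    def dist_by_mag(self, intensity=0):
        dic = {}  # mag -> distance
        for mag, curve in self.intensities_by_mag.items():
            curve = numpy.asarray(curve)
            if intensity <= curve.min():  # never decays below the threshold
                dic[mag] = self.dists[-1]
            elif intensity >= curve.max():  # below the threshold everywhere
                dic[mag] = self.dists[0]
            else:
                # curves decrease with distance, so interpolate on the
                # reversed (increasing) arrays
                dic[mag] = numpy.interp(
                    intensity, curve[::-1], self.dists[::-1])
        return dic


if __name__ == '__main__':
    dists = [0, 10, 20, 30, 40, 50]
    eff = EffectSketch({'5.00': [2.0, 1.5, 1.0, .6, .3, .1],
                        '6.00': [3.0, 2.2, 1.6, 1.0, .5, .2]}, dists)
    print(eff.dist_by_mag(.9))  # -> {'5.00': 22.5, '6.00': 32.0}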
def make_figure_dist_by_mag(extractors, what):
    """
    $ oq plot 'dist_by_mag?threshold=.01'
    """
    # NB: matplotlib is imported inside since it is a costly import
    import matplotlib.pyplot as plt
    [ex] = extractors
    effect = ex.get('effect')
    mags = ['%.3f' % mag for mag in effect.mags]
    fig, ax = plt.subplots()
    prefix, rest = what.split('?', 1)
    threshold = float(rest.split('=')[1])
    trti = 0
    for trt, dists in effect.dist_bins.items():
        dic = dict(zip(mags, effect[:, :, trti]))
        dist_by_mag = Effect(dic, dists, threshold).dist_by_mag()
        ax.plot(effect.mags, list(dist_by_mag.values()), label=trt)
        ax.set_xlabel('Mag')
        ax.set_ylabel('Dist')
        ax.set_title('Integration Distance at intensity=%s' % threshold)
        trti += 1
    return plt
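# Hedged usage sketch for the plotting helper above: driving it from a
# script instead of the `oq plot` command line. It assumes a completed
# classical calculation whose datastore exposes the 'effect' array; the
# calculation id below is hypothetical.
from openquake.calculators.extract import Extractor

if __name__ == '__main__':
    ex = Extractor(42)  # hypothetical calc_id of a finished calculation
    plt = make_figure_dist_by_mag([ex], 'dist_by_mag?threshold=.01')
    plt.show()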
def get_effect(mags, sitecol, gsims_by_trt, oq):
    """
    :returns: an ArrayWrapper effect_by_mag_dst_trt

    Also updates oq.maximum_distance.magdist and oq.pointsource_distance
    """
    dist_bins = {trt: oq.maximum_distance.get_dist_bins(trt)
                 for trt in gsims_by_trt}
    # computing the effect makes sense only if all IMTs have the same
    # unit of measure; for simplicity we will consider only PGA and SA
    effect = {}
    imts_with_period = [imt for imt in oq.imtls
                        if imt == 'PGA' or imt.startswith('SA')]
    imts_ok = len(imts_with_period) == len(oq.imtls)
    aw = hdf5.ArrayWrapper((), dist_bins)
    if sitecol is None:
        return aw
    if len(sitecol) >= oq.max_sites_disagg and imts_ok:
        logging.info('Computing effect of the ruptures')
        mon = performance.Monitor('rupture effect')
        eff_by_mag = parallel.Starmap.apply(
            get_effect_by_mag,
            (mags, sitecol.one(), gsims_by_trt, oq.maximum_distance,
             oq.imtls, mon)).reduce()
        aw.array = eff_by_mag
        effect.update({
            trt: Effect({mag: eff_by_mag[mag][:, t] for mag in eff_by_mag},
                        dist_bins[trt])
            for t, trt in enumerate(gsims_by_trt)})
        minint = oq.minimum_intensity.get('default', 0)
        for trt, eff in effect.items():
            if minint:
                oq.maximum_distance.magdist[trt] = eff.dist_by_mag(minint)
            # replace pointsource_distance with a dict trt -> mag -> dst
            if oq.pointsource_distance['default']:
                oq.pointsource_distance[trt] = eff.dist_by_mag(
                    eff.collapse_value(oq.pointsource_distance['default']))
    elif oq.pointsource_distance['default']:
        # replace pointsource_distance with a dict trt -> mag -> dst
        for trt in gsims_by_trt:
            try:
                dst = getdefault(oq.pointsource_distance, trt)
            except TypeError:  # 'NoneType' object is not subscriptable
                dst = getdefault(oq.maximum_distance, trt)
            oq.pointsource_distance[trt] = {mag: dst for mag in mags}
    return aw
def get_effect(mags, sitecol, gsims_by_trt, oq):
    """
    :returns: an ArrayWrapper effect_by_mag_dst_trt

    Updates oq.maximum_distance.magdist and oq.pointsource_distance
    """
    dist_bins = {trt: oq.maximum_distance.get_dist_bins(trt)
                 for trt in gsims_by_trt}
    aw = hdf5.ArrayWrapper((), dist_bins)
    if sitecol is None:
        return aw
    # computing the effect makes sense only if all IMTs have the same
    # unit of measure; for simplicity we will consider only PGA and SA
    effect = {}
    imts_with_period = [imt for imt in oq.imtls
                        if imt == 'PGA' or imt.startswith('SA')]
    imts_ok = len(imts_with_period) == len(oq.imtls)
    psd = oq.pointsource_distance['default']
    effect_ok = imts_ok and (psd or oq.minimum_intensity)
    if effect_ok:
        logging.info('Computing effect of the ruptures')
        eff_by_mag = parallel.Starmap.apply(
            get_effect_by_mag,
            (mags, sitecol.one(), gsims_by_trt, oq.maximum_distance,
             oq.imtls)).reduce()
        aw.array = eff_by_mag
        effect.update({
            trt: Effect({mag: eff_by_mag[mag][:, t] for mag in eff_by_mag},
                        dist_bins[trt])
            for t, trt in enumerate(gsims_by_trt)})
        minint = oq.minimum_intensity.get('default', 0)
        for trt, eff in effect.items():
            if minint:
                oq.maximum_distance.magdist[trt] = eff.dist_by_mag(minint)
            # replace pointsource_distance with a dict trt -> mag -> dst
            if psd:
                oq.pointsource_distance[trt] = eff.dist_by_mag(
                    eff.collapse_value(psd))
    elif psd:  # like in case_24 with PGV
        for trt in dist_bins:
            pdist = getdefault(oq.pointsource_distance, trt)
            oq.pointsource_distance[trt] = {mag: pdist for mag in mags}
    return aw
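# Illustrative sketch (hypothetical helper, not engine code) of the data
# shape produced by get_effect: after the call, oq.maximum_distance.magdist
# and oq.pointsource_distance map each tectonic region type to a dictionary
# magnitude -> distance in km, so downstream filters only need a lookup.
def beyond_collapse_distance(mag, dist, psdist_by_mag):
    """
    :param mag: magnitude key, e.g. '6.05' (assumed string format)
    :param dist: source-site distance in km
    :param psdist_by_mag: dictionary mag -> pointsource distance in km
    :returns: True if the rupture is far enough to be treated as pointlike
    """
    return dist > psdist_by_mag[mag]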
def execute(self):
    """
    Run in parallel `core_task(sources, sitecol, monitor)`, by
    parallelizing on the sources according to their weight and
    tectonic region type.
    """
    oq = self.oqparam
    if oq.hazard_calculation_id and not oq.compare_with_classical:
        with util.read(self.oqparam.hazard_calculation_id) as parent:
            self.csm_info = parent['csm_info']
        self.calc_stats()  # post-processing
        return {}
    mags = self.datastore['source_mags'][()]
    gsims_by_trt = self.csm_info.get_gsims_by_trt()
    dist_bins = {trt: oq.maximum_distance.get_dist_bins(trt)
                 for trt in gsims_by_trt}
    if oq.pointsource_distance and len(mags):
        logging.info('Computing effect of the ruptures')
        mon = self.monitor('rupture effect')
        effect = parallel.Starmap.apply(
            get_effect_by_mag,
            (mags, self.sitecol.one(), gsims_by_trt, oq.maximum_distance,
             oq.imtls, mon)).reduce()
        self.datastore['effect'] = effect
        self.datastore.set_attrs('effect', **dist_bins)
        self.effect = {
            trt: Effect({mag: effect[mag][:, t] for mag in effect},
                        dist_bins[trt],
                        getdefault(oq.pointsource_distance, trt))
            for t, trt in enumerate(gsims_by_trt)}
        for trt, eff in self.effect.items():
            oq.maximum_distance.magdist[trt] = eff.dist_by_mag()
            oq.pointsource_distance[trt] = eff.dist_by_mag(eff.collapse_value)
    else:
        self.effect = {}
    if oq.calculation_mode == 'preclassical' and self.N == 1:
        smap = parallel.Starmap(ruptures_by_mag_dist)
        for func, args in self.gen_task_queue():
            smap.submit(args)
        counts = smap.reduce()
        ndists = oq.maximum_distance.get_dist_bins.__defaults__[0]
        for mag in mags:  # magnitude strings used as dataset keys
            arr = numpy.zeros((ndists, len(gsims_by_trt)), U32)
            for trti, trt in enumerate(gsims_by_trt):
                try:
                    arr[:, trti] = counts[trt][mag]
                except KeyError:
                    pass
            self.datastore['rups_by_mag_dist/' + mag] = arr
        self.datastore.set_attrs('rups_by_mag_dist', **dist_bins)
        self.datastore['csm_info'] = self.csm_info
        return {}
    smap = parallel.Starmap(self.core_task.__func__, h5=self.datastore.hdf5)
    smap.task_queue = list(self.gen_task_queue())  # really fast
    acc0 = self.acc0()  # create the rup/ datasets BEFORE swmr_on()
    self.datastore.swmr_on()
    smap.h5 = self.datastore.hdf5
    self.calc_times = AccumDict(accum=numpy.zeros(3, F32))
    self.maxdists = []
    try:
        acc = smap.get_results().reduce(self.agg_dicts, acc0)
        self.store_rlz_info(acc.eff_ruptures)
    finally:
        if self.maxdists:
            maxdist = numpy.mean(self.maxdists)
            logging.info('Using effective maximum distance for '
                         'point sources %d km', maxdist)
        with self.monitor('store source_info'):
            self.store_source_info(self.calc_times)
        if self.sources_by_task:
            num_tasks = max(self.sources_by_task) + 1
            sbt = numpy.zeros(
                num_tasks, [('eff_ruptures', U32), ('eff_sites', U32),
                            ('srcids', hdf5.vuint32)])
            for task_no in range(num_tasks):
                sbt[task_no] = self.sources_by_task.get(
                    task_no, (0, 0, U32([])))
            self.datastore['sources_by_task'] = sbt
            self.sources_by_task.clear()
        numrups = sum(arr[0] for arr in self.calc_times.values())
        if self.totrups != numrups:
            logging.info('Considered %d/%d ruptures', numrups, self.totrups)
        self.calc_times.clear()  # save a bit of memory
    return acc
def execute(self):
    """
    Run in parallel `core_task(sources, sitecol, monitor)`, by
    parallelizing on the sources according to their weight and
    tectonic region type.
    """
    oq = self.oqparam
    if oq.hazard_calculation_id and not oq.compare_with_classical:
        with util.read(self.oqparam.hazard_calculation_id) as parent:
            self.csm_info = parent['csm_info']
        self.calc_stats()  # post-processing
        return {}
    mags = self.datastore['source_mags'][()]
    if len(mags) == 0:  # everything was discarded
        raise RuntimeError('All sources were discarded!?')
    gsims_by_trt = self.csm_info.get_gsims_by_trt()
    dist_bins = {trt: oq.maximum_distance.get_dist_bins(trt)
                 for trt in gsims_by_trt}
    if oq.pointsource_distance:
        logging.info('Computing effect of the ruptures')
        mon = self.monitor('rupture effect')
        effect = parallel.Starmap.apply(
            get_effect_by_mag,
            (mags, self.sitecol.one(), gsims_by_trt, oq.maximum_distance,
             oq.imtls, mon)).reduce()
        self.datastore['effect'] = effect
        self.datastore.set_attrs('effect', **dist_bins)
        self.effect = {
            trt: Effect({mag: effect[mag][:, t] for mag in effect},
                        dist_bins[trt],
                        getdefault(oq.pointsource_distance, trt))
            for t, trt in enumerate(gsims_by_trt)}
        for trt, eff in self.effect.items():
            oq.maximum_distance.magdist[trt] = eff.dist_by_mag()
            oq.pointsource_distance[trt] = eff.dist_by_mag(eff.collapse_value)
    else:
        self.effect = {}
    smap = parallel.Starmap(self.core_task.__func__, h5=self.datastore.hdf5,
                            num_cores=oq.num_cores)
    smap.task_queue = list(self.gen_task_queue())  # really fast
    acc0 = self.acc0()  # create the rup/ datasets BEFORE swmr_on()
    self.datastore.swmr_on()
    smap.h5 = self.datastore.hdf5
    self.calc_times = AccumDict(accum=numpy.zeros(3, F32))
    try:
        acc = smap.get_results().reduce(self.agg_dicts, acc0)
        self.store_rlz_info(acc.eff_ruptures)
    finally:
        with self.monitor('store source_info'):
            self.store_source_info(self.calc_times)
        if self.by_task:
            logging.info('Storing by_task information')
            num_tasks = max(self.by_task) + 1,  # one-element shape tuple
            er = self.datastore.create_dset('by_task/eff_ruptures',
                                            U32, num_tasks)
            es = self.datastore.create_dset('by_task/eff_sites',
                                            U32, num_tasks)
            si = self.datastore.create_dset('by_task/srcids',
                                            hdf5.vuint32, num_tasks,
                                            fillvalue=None)
            for task_no, rec in self.by_task.items():
                effrups, effsites, srcids = rec
                er[task_no] = effrups
                es[task_no] = effsites
                si[task_no] = srcids
            self.by_task.clear()
        numrups = sum(arr[0] for arr in self.calc_times.values())
        numsites = sum(arr[1] for arr in self.calc_times.values())
        logging.info('Effective number of ruptures: %d/%d',
                     numrups, self.totrups)
        logging.info('Effective number of sites per rupture: %d',
                     numsites / numrups)
        self.calc_times.clear()  # save a bit of memory
    return acc
def execute(self):
    """
    Run in parallel `core_task(sources, sitecol, monitor)`, by
    parallelizing on the sources according to their weight and
    tectonic region type.
    """
    oq = self.oqparam
    if oq.hazard_calculation_id and not oq.compare_with_classical:
        with util.read(self.oqparam.hazard_calculation_id) as parent:
            self.csm_info = parent['csm_info']
        self.calc_stats()  # post-processing
        return {}
    mags = self.datastore['source_mags'][()]
    if len(mags) == 0:  # everything was discarded
        raise RuntimeError('All sources were discarded!?')
    gsims_by_trt = self.csm_info.get_gsims_by_trt()
    dist_bins = {trt: oq.maximum_distance.get_dist_bins(trt)
                 for trt in gsims_by_trt}
    # computing the effect makes sense only if all IMTs have the same
    # unit of measure; for simplicity we will consider only PGA and SA
    self.effect = {}
    imts_with_period = [imt for imt in oq.imtls
                        if imt == 'PGA' or imt.startswith('SA')]
    imts_ok = len(imts_with_period) == len(oq.imtls)
    if len(self.sitecol) >= oq.max_sites_disagg and imts_ok:
        logging.info('Computing effect of the ruptures')
        mon = self.monitor('rupture effect')
        effect = parallel.Starmap.apply(
            get_effect_by_mag,
            (mags, self.sitecol.one(), gsims_by_trt, oq.maximum_distance,
             oq.imtls, mon)).reduce()
        self.datastore['effect_by_mag_dst_trt'] = effect
        self.datastore.set_attrs('effect_by_mag_dst_trt', **dist_bins)
        self.effect.update({
            trt: Effect({mag: effect[mag][:, t] for mag in effect},
                        dist_bins[trt])
            for t, trt in enumerate(gsims_by_trt)})
        minint = oq.minimum_intensity.get('default', 0)
        for trt, eff in self.effect.items():
            if minint:
                oq.maximum_distance.magdist[trt] = eff.dist_by_mag(minint)
            # replace pointsource_distance with a dict trt -> mag -> dst
            if oq.pointsource_distance['default']:
                oq.pointsource_distance[trt] = eff.dist_by_mag(
                    eff.collapse_value(oq.pointsource_distance['default']))
    elif oq.pointsource_distance['default']:
        # replace pointsource_distance with a dict trt -> mag -> dst
        for trt in gsims_by_trt:
            try:
                dst = getdefault(oq.pointsource_distance, trt)
            except TypeError:  # 'NoneType' object is not subscriptable
                dst = getdefault(oq.maximum_distance, trt)
            oq.pointsource_distance[trt] = {mag: dst for mag in mags}
    smap = parallel.Starmap(self.core_task.__func__, h5=self.datastore.hdf5,
                            num_cores=oq.num_cores)
    smap.task_queue = list(self.gen_task_queue())  # really fast
    acc0 = self.acc0()  # create the rup/ datasets BEFORE swmr_on()
    self.datastore.swmr_on()
    smap.h5 = self.datastore.hdf5
    self.calc_times = AccumDict(accum=numpy.zeros(3, F32))
    try:
        acc = smap.get_results().reduce(self.agg_dicts, acc0)
        self.store_rlz_info(acc.eff_ruptures)
    finally:
        with self.monitor('store source_info'):
            self.store_source_info(self.calc_times)
        if self.by_task:
            logging.info('Storing by_task information')
            num_tasks = max(self.by_task) + 1,  # one-element shape tuple
            er = self.datastore.create_dset('by_task/eff_ruptures',
                                            U32, num_tasks)
            es = self.datastore.create_dset('by_task/eff_sites',
                                            U32, num_tasks)
            si = self.datastore.create_dset('by_task/srcids',
                                            hdf5.vuint32, num_tasks,
                                            fillvalue=None)
            for task_no, rec in self.by_task.items():
                effrups, effsites, srcids = rec
                er[task_no] = effrups
                es[task_no] = effsites
                si[task_no] = srcids
            self.by_task.clear()
        self.numrups = sum(arr[0] for arr in self.calc_times.values())
        numsites = sum(arr[1] for arr in self.calc_times.values())
        logging.info('Effective number of ruptures: %d/%d',
                     self.numrups, self.totrups)
        logging.info('Effective number of sites per rupture: %d',
                     numsites / self.numrups)
        self.calc_times.clear()  # save a bit of memory
    return acc