def post_execute(self, result):
    """
    Save the hazard curves computed from the GMFs (if requested) and,
    when ``compare_with_classical`` is set, run a full classical
    calculation in a child datastore and log the relative difference
    of the mean curves per IMT.

    :param result: a dictionary (trt_model_id, gsim) -> haz_curves or an
                   empty dictionary if hazard_curves_from_gmfs is false
    """
    oq = self.oqparam
    if not oq.hazard_curves_from_gmfs and not oq.ground_motion_fields:
        return
    if oq.hazard_curves_from_gmfs:
        # reuse the classical post-processing on the curves built from GMFs
        ClassicalCalculator.post_execute.__func__(self, result)
    if oq.compare_with_classical:  # compute classical curves
        export_dir = os.path.join(oq.export_dir, 'cl')
        if not os.path.exists(export_dir):
            os.makedirs(export_dir)
        oq.export_dir = export_dir
        # use a different datastore, chained to the current one
        self.cl = ClassicalCalculator(oq, self.monitor)
        self.cl.datastore.parent = self.datastore
        result = self.cl.run(pre_execute=False, clean_up=False)
        for imt in self.mean_curves.dtype.fields:
            rdiff, index = max_rel_diff_index(
                self.cl.mean_curves[imt], self.mean_curves[imt])
            # logging.warning: logging.warn is a deprecated alias
            logging.warning(
                'Relative difference with the classical '
                'mean curves for IMT=%s: %d%% at site index %d',
                imt, rdiff * 100, index)
def post_execute(self, result):
    """
    Save the GMFs (if any) in the datastore via the persistent
    attribute ``gmf_by_trt_gsim`` and build the hazard curves (if any);
    when ``mean_hazard_curves`` is set, also run a classical calculation
    and log the relative difference of the mean curves per IMT.
    Returns nothing; all outputs go to the datastore/export directory.

    :param result: a dictionary (trt_model_id, gsim) -> haz_curves or an
                   empty dictionary if hazard_curves_from_gmfs is false
    """
    oq = self.oqparam
    if not oq.hazard_curves_from_gmfs and not oq.ground_motion_fields:
        return
    if oq.hazard_curves_from_gmfs:
        # reuse the classical post-processing on the curves built from GMFs
        ClassicalCalculator.post_execute.__func__(self, result)
    if oq.ground_motion_fields:
        # reduce each gmf_by_tag to the column of the current gsim
        for (trt_id, gsim), gmf_by_tag in self.gmf_dict.items():
            self.gmf_dict[trt_id, gsim] = {tag: gmf_by_tag[tag][gsim]
                                           for tag in gmf_by_tag}
        # persist via the datastore attribute, then release the memory;
        # NOTE(review): assumes the persistent_attribute setter copies or
        # serializes the dict before .clear() empties it — confirm
        self.gmf_by_trt_gsim = self.gmf_dict
        self.gmf_dict.clear()
    if oq.mean_hazard_curves:  # compute classical ones
        export_dir = os.path.join(oq.export_dir, 'cl')
        if not os.path.exists(export_dir):
            os.makedirs(export_dir)
        oq.export_dir = export_dir
        # use a different datastore
        self.cl = ClassicalCalculator(oq, self.monitor)
        # copy the relevant attributes
        self.cl.composite_source_model = self.csm
        self.cl.sitecol = self.sitecol
        self.cl.rlzs_assoc = self.csm.get_rlzs_assoc()
        result = self.cl.run(pre_execute=False, clean_up=False)
        for imt in self.mean_curves.dtype.fields:
            rdiff, index = max_rel_diff_index(
                self.cl.mean_curves[imt], self.mean_curves[imt])
            # logging.warning: logging.warn is a deprecated alias
            logging.warning('Relative difference with the classical '
                            'mean curves for IMT=%s: %d%% at site index %d',
                            imt, rdiff * 100, index)
def post_execute(self, result):
    """
    Save the hazard curves computed from the GMFs (if requested) and,
    when ``compare_with_classical`` is set, run a full classical
    calculation (sharing source model, site collection and realization
    association) and log the relative difference of the mean curves.

    :param result: a dictionary (trt_model_id, gsim) -> haz_curves or an
                   empty dictionary if hazard_curves_from_gmfs is false
    """
    oq = self.oqparam
    if not oq.hazard_curves_from_gmfs and not oq.ground_motion_fields:
        return
    if oq.hazard_curves_from_gmfs:
        # reuse the classical post-processing on the curves built from GMFs
        ClassicalCalculator.post_execute.__func__(self, result)
    if oq.compare_with_classical:  # compute classical curves
        export_dir = os.path.join(oq.export_dir, 'cl')
        if not os.path.exists(export_dir):
            os.makedirs(export_dir)
        oq.export_dir = export_dir
        # use a different datastore
        self.cl = ClassicalCalculator(oq, self.monitor)
        # copy the relevant attributes; the classical run needs the full
        # (complete) site collection, not a filtered subset
        self.cl.composite_source_model = self.csm
        self.cl.sitecol = self.sitecol.complete
        self.cl.rlzs_assoc = self.csm.get_rlzs_assoc()
        result = self.cl.run(pre_execute=False, clean_up=False)
        for imt in self.mean_curves.dtype.fields:
            rdiff, index = max_rel_diff_index(
                self.cl.mean_curves[imt], self.mean_curves[imt])
            # logging.warning: logging.warn is a deprecated alias
            logging.warning('Relative difference with the classical '
                            'mean curves for IMT=%s: %d%% at site index %d',
                            imt, rdiff * 100, index)
class EventBasedCalculator(ClassicalCalculator):
    """
    Event based PSHA calculator generating the ruptures only
    """
    pre_calculator = 'event_based_rupture'
    core_func = compute_gmfs_and_curves
    # GMFs are persisted in the datastore under the key 'gmf_by_trt_gsim'
    gmf_by_trt_gsim = datastore.persistent_attribute('gmf_by_trt_gsim')
    is_stochastic = True

    def pre_execute(self):
        """
        Read the precomputed ruptures (or compute them on the fly) and
        prepare some empty files in the export directory to store the gmfs
        (if any). If there were pre-existing files, they will be erased.
        """
        super(EventBasedCalculator, self).pre_execute()
        # merge the per-collection dictionaries tag -> rupture into one
        rupture_by_tag = sum(self.datastore['sescollection'], AccumDict())
        # fix the processing order by sorting on the rupture tags
        self.sesruptures = [rupture_by_tag[tag]
                            for tag in sorted(rupture_by_tag)]

    def combine_curves_and_save_gmfs(self, acc, res):
        """
        Combine the hazard curves (if any) and save the gmfs (if any)
        sequentially; however, notice that the gmfs may come from different
        tasks in any order.

        :param acc: an accumulator for the hazard curves
        :param res: a dictionary trt_id, gsim -> (gmfs, curves_by_imt)
        :returns: a new accumulator
        """
        gen_gmf = self.oqparam.ground_motion_fields
        for trt_id, gsim in res:
            gmf_by_tag, curves_by_imt = res[trt_id, gsim]
            if gen_gmf:
                # accumulate the gmfs per (trt_id, gsim) pair
                self.gmf_dict[trt_id, gsim] += gmf_by_tag
            # merge the curves of this task into the accumulator
            acc = agg_dicts(acc, AccumDict({(trt_id, gsim): curves_by_imt}))
        return acc

    def execute(self):
        """
        Run in parallel `core_func(sources, sitecol, monitor)`, by
        parallelizing on the ruptures according to their weight and
        tectonic region type.
        """
        oq = self.oqparam
        # nothing to compute when neither gmfs nor curves are requested
        if not oq.hazard_curves_from_gmfs and not oq.ground_motion_fields:
            return
        monitor = self.monitor(self.core_func.__name__)
        monitor.oqparam = oq
        # start from zero curves for each realization key
        zc = zero_curves(len(self.sitecol), self.oqparam.imtls)
        zerodict = AccumDict((key, zc) for key in self.rlzs_assoc)
        self.gmf_dict = collections.defaultdict(AccumDict)
        # tasks are grouped by the rupture attribute 'col_id'
        curves_by_trt_gsim = parallel.apply_reduce(
            self.core_func.__func__,
            (self.sesruptures, self.sitecol, self.rlzs_assoc, monitor),
            concurrent_tasks=self.oqparam.concurrent_tasks,
            acc=zerodict, agg=self.combine_curves_and_save_gmfs,
            key=operator.attrgetter('col_id'))
        return curves_by_trt_gsim

    def post_execute(self, result):
        """
        Return a dictionary with the output files, i.e. gmfs (if any)
        and hazard curves (if any).
        """
        oq = self.oqparam
        if not oq.hazard_curves_from_gmfs and not oq.ground_motion_fields:
            return
        if oq.hazard_curves_from_gmfs:
            # reuse the classical post-processing on the curves from GMFs
            ClassicalCalculator.post_execute.__func__(self, result)
        if oq.ground_motion_fields:
            # reduce each gmf_by_tag to the column of the current gsim
            for (trt_id, gsim), gmf_by_tag in self.gmf_dict.items():
                self.gmf_dict[trt_id, gsim] = {tag: gmf_by_tag[tag][gsim]
                                               for tag in gmf_by_tag}
            # persist the gmfs, then free the in-memory dictionary;
            # NOTE(review): assumes the persistent_attribute setter copies
            # or serializes the dict before .clear() empties it — confirm
            self.gmf_by_trt_gsim = self.gmf_dict
            self.gmf_dict.clear()
        if oq.mean_hazard_curves:  # compute classical ones
            export_dir = os.path.join(oq.export_dir, 'cl')
            if not os.path.exists(export_dir):
                os.makedirs(export_dir)
            oq.export_dir = export_dir
            # use a different datastore
            self.cl = ClassicalCalculator(oq, self.monitor)
            # copy the relevant attributes
            self.cl.composite_source_model = self.csm
            self.cl.sitecol = self.sitecol
            self.cl.rlzs_assoc = self.csm.get_rlzs_assoc()
            result = self.cl.run(pre_execute=False, clean_up=False)
            for imt in self.mean_curves.dtype.fields:
                rdiff, index = max_rel_diff_index(
                    self.cl.mean_curves[imt], self.mean_curves[imt])
                logging.warn('Relative difference with the classical '
                             'mean curves for IMT=%s: %d%% at site index %d',
                             imt, rdiff * 100, index)
class EventBasedCalculator(ClassicalCalculator):
    """
    Event based PSHA calculator generating the ruptures only
    """
    pre_calculator = 'event_based_rupture'
    core_func = compute_gmfs_and_curves
    is_stochastic = True

    def pre_execute(self):
        """
        Read the precomputed ruptures (or compute them on the fly) and
        prepare some empty files in the export directory to store the gmfs
        (if any). If there were pre-existing files, they will be erased.
        """
        super(EventBasedCalculator, self).pre_execute()
        self.sesruptures = []
        gsims_by_col = self.rlzs_assoc.get_gsims_by_col()
        # one HDF5 dataset per rupture collection, indexed by col_id
        self.datasets = []
        for col_id, sescol in enumerate(self.datastore['sescollection']):
            # composite dtype with one field per gsim/imt combination
            gmf_dt = gsim_imt_dt(gsims_by_col[col_id], self.oqparam.imtls)
            # collect the ruptures in a fixed (sorted-by-tag) order;
            # NOTE: .iteritems() is Python 2 only
            for tag, sesrup in sorted(sescol.iteritems()):
                sesrup = sescol[tag]
                self.sesruptures.append(sesrup)
            self.datasets.append(
                self.datastore.create_dset('gmfs/col%02d' % col_id, gmf_dt))

    def combine_curves_and_save_gmfs(self, acc, res):
        """
        Combine the hazard curves (if any) and save the gmfs (if any)
        sequentially; notice that the gmfs may come from different tasks
        in any order.

        :param acc: an accumulator for the hazard curves
        :param res: a dictionary trt_id, gsim -> gmf_array or curves_by_imt
        :returns: a new accumulator
        """
        sav_mon = self.monitor('saving gmfs')
        agg_mon = self.monitor('aggregating hcurves')
        save_gmfs = self.oqparam.ground_motion_fields
        for trt_id, gsim_or_col in res:
            # an int key is a collection index -> gmf array to be saved
            if isinstance(gsim_or_col, int) and save_gmfs:
                with sav_mon:
                    gmfa = res[trt_id, gsim_or_col]
                    dataset = self.datasets[gsim_or_col]
                    dataset.attrs['trt_model_id'] = trt_id
                    dataset.extend(gmfa)
                    # track the total size for the sanity check in execute()
                    self.nbytes += gmfa.nbytes
                    self.datastore.hdf5.flush()
            elif isinstance(gsim_or_col, str):  # aggregate hcurves
                with agg_mon:
                    curves_by_imt = res[trt_id, gsim_or_col]
                    acc = agg_dicts(
                        acc, AccumDict({(trt_id, gsim_or_col):
                                        curves_by_imt}))
        sav_mon.flush()
        agg_mon.flush()
        return acc

    def execute(self):
        """
        Run in parallel `core_func(sources, sitecol, monitor)`, by
        parallelizing on the ruptures according to their weight and
        tectonic region type.
        """
        oq = self.oqparam
        # nothing to compute when neither gmfs nor curves are requested
        if not oq.hazard_curves_from_gmfs and not oq.ground_motion_fields:
            return
        monitor = self.monitor(self.core_func.__name__)
        monitor.oqparam = oq
        # start from zero curves on the complete site collection
        zc = zero_curves(len(self.sitecol.complete), self.oqparam.imtls)
        zerodict = AccumDict((key, zc) for key in self.rlzs_assoc)
        self.nbytes = 0
        # tasks are grouped by the rupture attribute 'col_id'
        curves_by_trt_gsim = parallel.apply_reduce(
            self.core_func.__func__,
            (self.sesruptures, self.sitecol, self.rlzs_assoc, monitor),
            concurrent_tasks=self.oqparam.concurrent_tasks,
            acc=zerodict, agg=self.combine_curves_and_save_gmfs,
            key=operator.attrgetter('col_id'))
        if oq.ground_motion_fields:
            # sanity check on the saved gmfs size
            expected_nbytes = self.datastore[
                'counts_per_rlz'].attrs['gmfs_nbytes']
            self.datastore['gmfs'].attrs['nbytes'] = self.nbytes
            assert self.nbytes == expected_nbytes, (
                self.nbytes, expected_nbytes)
        return curves_by_trt_gsim

    def post_execute(self, result):
        """
        :param result: a dictionary (trt_model_id, gsim) -> haz_curves or an
                       empty dictionary if hazard_curves_from_gmfs is false
        """
        oq = self.oqparam
        if not oq.hazard_curves_from_gmfs and not oq.ground_motion_fields:
            return
        if oq.hazard_curves_from_gmfs:
            # reuse the classical post-processing on the curves from GMFs
            ClassicalCalculator.post_execute.__func__(self, result)
        if oq.compare_with_classical:  # compute classical curves
            export_dir = os.path.join(oq.export_dir, 'cl')
            if not os.path.exists(export_dir):
                os.makedirs(export_dir)
            oq.export_dir = export_dir
            # use a different datastore
            self.cl = ClassicalCalculator(oq, self.monitor)
            # copy the relevant attributes
            self.cl.composite_source_model = self.csm
            self.cl.sitecol = self.sitecol.complete
            self.cl.rlzs_assoc = self.csm.get_rlzs_assoc()
            result = self.cl.run(pre_execute=False, clean_up=False)
            for imt in self.mean_curves.dtype.fields:
                rdiff, index = max_rel_diff_index(
                    self.cl.mean_curves[imt], self.mean_curves[imt])
                logging.warn('Relative difference with the classical '
                             'mean curves for IMT=%s: %d%% at site index %d',
                             imt, rdiff * 100, index)
class EventBasedCalculator(ClassicalCalculator):
    """
    Event based PSHA calculator generating the ruptures only
    """
    pre_calculator = 'event_based_rupture'
    core_func = compute_gmfs_and_curves
    is_stochastic = True

    def pre_execute(self):
        """
        Read the precomputed ruptures (or compute them on the fly) and
        prepare some empty files in the export directory to store the gmfs
        (if any). If there were pre-existing files, they will be erased.
        """
        super(EventBasedCalculator, self).pre_execute()
        self.sesruptures = []
        gsims_by_col = self.rlzs_assoc.get_gsims_by_col()
        # one HDF5 dataset per non-empty rupture collection, keyed by col_id
        self.datasets = {}
        for col_id, sescol in enumerate(self.datastore['sescollection']):
            # composite dtype with one field per gsim/imt combination
            gmf_dt = gsim_imt_dt(gsims_by_col[col_id], self.oqparam.imtls)
            # collect the ruptures in a fixed (sorted-by-tag) order
            for tag, sesrup in sorted(sescol.items()):
                sesrup = sescol[tag]
                self.sesruptures.append(sesrup)
            # create a dataset only when gmfs are requested and the
            # collection is non-empty
            if self.oqparam.ground_motion_fields and sescol:
                self.datasets[col_id] = self.datastore.create_dset(
                    'gmfs/col%02d' % col_id, gmf_dt)

    def combine_curves_and_save_gmfs(self, acc, res):
        """
        Combine the hazard curves (if any) and save the gmfs (if any)
        sequentially; notice that the gmfs may come from different tasks
        in any order.

        :param acc: an accumulator for the hazard curves
        :param res: a dictionary trt_id, gsim -> gmf_array or curves_by_imt
        :returns: a new accumulator
        """
        sav_mon = self.monitor('saving gmfs')
        agg_mon = self.monitor('aggregating hcurves')
        save_gmfs = self.oqparam.ground_motion_fields
        for trt_id, gsim_or_col in res:
            # an int key is a collection index -> gmf array to be saved
            if isinstance(gsim_or_col, int) and save_gmfs:
                with sav_mon:
                    gmfa = res[trt_id, gsim_or_col]
                    dataset = self.datasets[gsim_or_col]
                    dataset.attrs['trt_model_id'] = trt_id
                    dataset.extend(gmfa)
                    # track the total size for the sanity check in execute()
                    self.nbytes += gmfa.nbytes
                    self.datastore.hdf5.flush()
            elif isinstance(gsim_or_col, str):  # aggregate hcurves
                with agg_mon:
                    curves_by_imt = res[trt_id, gsim_or_col]
                    acc = agg_dicts(
                        acc, AccumDict({(trt_id, gsim_or_col):
                                        curves_by_imt}))
        sav_mon.flush()
        agg_mon.flush()
        return acc

    def execute(self):
        """
        Run in parallel `core_func(sources, sitecol, monitor)`, by
        parallelizing on the ruptures according to their weight and
        tectonic region type.
        """
        oq = self.oqparam
        # nothing to compute when neither gmfs nor curves are requested
        if not oq.hazard_curves_from_gmfs and not oq.ground_motion_fields:
            return
        monitor = self.monitor(self.core_func.__name__)
        monitor.oqparam = oq
        # start from zero curves on the complete site collection
        zc = zero_curves(len(self.sitecol.complete), self.oqparam.imtls)
        zerodict = AccumDict((key, zc) for key in self.rlzs_assoc)
        self.nbytes = 0
        # tasks are grouped by the rupture attribute 'col_id'
        curves_by_trt_gsim = parallel.apply_reduce(
            self.core_func.__func__,
            (self.sesruptures, self.sitecol, self.rlzs_assoc, monitor),
            concurrent_tasks=self.oqparam.concurrent_tasks,
            acc=zerodict, agg=self.combine_curves_and_save_gmfs,
            key=operator.attrgetter('col_id'))
        if oq.ground_motion_fields:
            # sanity check on the saved gmfs size
            expected_nbytes = self.datastore['counts_per_rlz'].attrs[
                'gmfs_nbytes']
            self.datastore['gmfs'].attrs['nbytes'] = self.nbytes
            assert self.nbytes == expected_nbytes, (self.nbytes,
                                                    expected_nbytes)
        return curves_by_trt_gsim

    def post_execute(self, result):
        """
        :param result: a dictionary (trt_model_id, gsim) -> haz_curves or an
                       empty dictionary if hazard_curves_from_gmfs is false
        """
        oq = self.oqparam
        if not oq.hazard_curves_from_gmfs and not oq.ground_motion_fields:
            return
        if oq.hazard_curves_from_gmfs:
            # reuse the classical post-processing on the curves from GMFs
            ClassicalCalculator.post_execute.__func__(self, result)
        if oq.compare_with_classical:  # compute classical curves
            export_dir = os.path.join(oq.export_dir, 'cl')
            if not os.path.exists(export_dir):
                os.makedirs(export_dir)
            oq.export_dir = export_dir
            # use a different datastore, chained to the current one
            self.cl = ClassicalCalculator(oq, self.monitor)
            self.cl.datastore.parent = self.datastore
            result = self.cl.run(pre_execute=False, clean_up=False)
            for imt in self.mean_curves.dtype.fields:
                rdiff, index = max_rel_diff_index(self.cl.mean_curves[imt],
                                                  self.mean_curves[imt])
                logging.warn(
                    'Relative difference with the classical '
                    'mean curves for IMT=%s: %d%% at site index %d',
                    imt, rdiff * 100, index)