def zerodict(self):
    """
    Build and return the initial accumulator: an empty
    :class:`ProbabilityMap` carrying bookkeeping attributes
    (`calc_times`, `eff_ruptures`, `bb_dict`).
    """
    acc = ProbabilityMap()
    acc.calc_times = []
    acc.eff_ruptures = AccumDict()  # trt_id -> eff_ruptures
    acc.bb_dict = {}
    # bounding boxes are only needed when disaggregation is requested
    if self.oqparam.poes_disagg:
        for smodel in self.csm.source_models:
            for sid in self.sitecol.sids:
                acc.bb_dict[smodel.ordinal, sid] = BoundingBox(
                    smodel.ordinal, sid)
    return acc
def zerodict(self):
    """
    Build and return the initial accumulator: an empty
    :class:`ProbabilityMap` carrying bookkeeping attributes
    (`calc_times`, `eff_ruptures`, `bb_dict`).
    """
    acc = ProbabilityMap()
    acc.calc_times = []
    acc.eff_ruptures = AccumDict()  # grp_id -> eff_ruptures
    acc.bb_dict = BBdict()
    # bounding boxes are only needed when disaggregation is requested
    if self.oqparam.poes_disagg:
        acc.bb_dict.update(
            {(smodel.ordinal, sid): BoundingBox(smodel.ordinal, sid)
             for sid in self.sitecol.sids
             for smodel in self.csm.source_models})
    return acc
def execute(self):
    """
    Run `core_task(sources, sitecol, monitor)` in parallel, splitting
    the work over the ruptures according to their weight and tectonic
    region type, and return the reduced accumulator.
    """
    oq = self.oqparam
    # nothing to compute if neither curves nor GMFs were requested
    if not (oq.hazard_curves_from_gmfs or oq.ground_motion_fields):
        return
    mon = self.monitor(self.core_task.__name__)
    mon.oqparam = oq
    min_iml = fix_minimum_intensity(oq.minimum_intensity, oq.imtls)
    task_args = (self.sesruptures, self.sitecol, oq.imtls,
                 self.rlzs_assoc, min_iml, mon)
    acc = parallel.apply_reduce(
        self.core_task.__func__,
        task_args,
        concurrent_tasks=oq.concurrent_tasks,
        agg=self.combine_curves_and_save_gmfs,
        acc=ProbabilityMap(),
        key=operator.attrgetter('trt_id'),
        weight=operator.attrgetter('weight'))
    if oq.ground_motion_fields:
        self.datastore.set_nbytes('gmf_data')
    return acc