def execute(self):
    """
    Run the event based part of the calculation in parallel tasks and
    return the accumulated dictionary.

    Side effects: resets ``self.gmdata``, ``self.offset``,
    ``self.gmf_size`` and ``self.indices``; stores the source
    information and the ``gmf_data/indices`` dataset in the datastore.

    :raises RuntimeError:
        if GMFs were expected but none were generated (unless the
        calculation mode is a ucerf one)
    """
    if self.oqparam.hazard_calculation_id:
        # reusing a previous hazard calculation: the sources were
        # already saved there, so replace the saver with a no-op
        # pass-through
        def saving_sources_by_task(allargs, dstore):
            return allargs
    else:
        from openquake.calculators.classical import saving_sources_by_task
    # reset the per-calculation accumulators before submitting tasks
    self.gmdata = {}
    self.offset = 0
    self.gmf_size = 0
    self.indices = collections.defaultdict(list)  # sid, idx -> indices
    acc = self.zerodict()
    with self.monitor('managing sources', autoflush=True):
        allargs = self.gen_args(self.monitor('classical'))
        iterargs = saving_sources_by_task(allargs, self.datastore)
        if isinstance(allargs, list):
            # there is a trick here: if the arguments are known
            # (a list, not an iterator), keep them as a list
            # then the Starmap will understand the case of a single
            # argument tuple and it will run in core the task
            iterargs = list(iterargs)
        if self.oqparam.ground_motion_fields is False:
            logging.info('Generating ruptures only')
        # submit the tasks while still inside the 'managing sources'
        # monitor; the reduction happens outside of it
        ires = parallel.Starmap(
            self.core_task.__func__, iterargs).submit_all()
    acc = ires.reduce(self.agg_dicts, acc)
    if self.oqparam.hazard_calculation_id is None:
        # only store source_info when the hazard was computed here,
        # not when it comes from a previous calculation
        with self.monitor('store source_info', autoflush=True):
            self.store_source_info(self.csm.infos, acc)
    self.check_overflow()  # check the number of events
    base.save_gmdata(self, self.R)
    if self.indices:
        N = len(self.sitecol.complete)
        logging.info('Saving gmf_data/indices')
        with self.monitor('saving gmf_data/indices', measuremem=True,
                          autoflush=True):
            # variable-length uint32 dataset with one (start, stop)
            # pair of index arrays per site id
            dset = self.datastore.create_dset(
                'gmf_data/indices', hdf5.vuint32, shape=(N, 2),
                fillvalue=None)
            for sid in self.sitecol.complete.sids:
                dset[sid, 0] = self.indices[sid, 0]
                dset[sid, 1] = self.indices[sid, 1]
    elif (self.oqparam.ground_motion_fields and
          'ucerf' not in self.oqparam.calculation_mode):
        # GMFs were requested but no indices were collected: presumably
        # every ground motion value fell below minimum_intensity
        raise RuntimeError('No GMFs were generated, perhaps they were '
                           'all below the minimum_intensity threshold')
    return acc
def execute(self):
    """
    Run the classical calculation in parallel tasks and return the
    accumulated dictionary, after storing the source information.
    """
    with self.monitor('managing sources', autoflush=True):
        args = self.gen_args(self.csm, self.monitor('classical'))
        task_args = saving_sources_by_task(args, self.datastore)
        if isinstance(args, list):
            # there is a trick here: if the arguments are known
            # (a list, not an iterator), keep them as a list
            # then the Starmap will understand the case of a single
            # argument tuple and it will run in core the task
            task_args = list(task_args)
        smap = parallel.Starmap(self.core_task.__func__, task_args)
        acc = smap.reduce(self.agg_dicts, self.zerodict())
    with self.monitor('store source_info', autoflush=True):
        self.store_source_info(self.csm.infos, acc)
    return acc