def execute(self):
    """
    Build or import the ruptures, then compute the ground motion fields
    in parallel via a Starmap over rupture getters.

    :returns: the accumulator obtained by reducing the core-task results,
        or an empty dict when nothing has to be computed
    """
    oqp = self.oqparam
    self.set_param()
    self.offset = 0
    if oqp.hazard_calculation_id:  # from ruptures
        self.datastore.parent = datastore.read(oqp.hazard_calculation_id)
    elif hasattr(self, 'csm'):  # from sources
        self.build_events_from_sources()
        if (oqp.ground_motion_fields is False and
                oqp.hazard_curves_from_gmfs is False):
            return {}  # only the events were requested
    elif 'rupture_model' not in oqp.inputs:
        logging.warning(
            'There is no rupture_model, the calculator will just '
            'import data without performing any calculation')
        fake = logictree.FullLogicTree.fake()
        self.datastore['full_lt'] = fake  # needed to expose the outputs
        self.datastore['weights'] = [1.]
        return {}
    else:  # scenario
        self._read_scenario_ruptures()
        if (oqp.ground_motion_fields is False and
                oqp.hazard_curves_from_gmfs is False):
            return {}

    num_ruptures = len(self.datastore['ruptures'])
    if oqp.ground_motion_fields:
        # create the gmf_data output datasets before spawning the workers
        primary_imts = oqp.get_primary_imtls()
        base.create_gmf_data(self.datastore, primary_imts,
                             oqp.get_sec_imts())
        self.datastore.create_dset('gmf_data/sigma_epsilon',
                                   sig_eps_dt(oqp.imtls))
        self.datastore.create_dset('gmf_data/time_by_rup', time_dt,
                                   (num_ruptures, ), fillvalue=None)

    # compute_gmfs in parallel
    logging.info('Reading {:_d} ruptures'.format(num_ruptures))
    task_args = [(rgetter, self.param) for rgetter in gen_rupture_getters(
        self.datastore, oqp.concurrent_tasks)]
    # building the args is fast since the ruptures are neither prefiltered
    # nor are their geometries read; a list (not an iterator) is required
    # to avoid the usual h5py error, last seen on macos
    self.datastore.swmr_on()
    starmap = parallel.Starmap(self.core_task.__func__, task_args,
                               h5=self.datastore.hdf5)
    starmap.monitor.save('srcfilter', self.srcfilter)
    result = starmap.reduce(self.agg_dicts, self.acc0())
    if 'gmf_data' not in self.datastore:
        return result
    if oqp.ground_motion_fields:
        with self.monitor('saving avg_gmf', measuremem=True):
            self.save_avg_gmf()
    return result
def execute(self):
    """
    Build or import the ruptures, then compute the ground motion fields
    in parallel and store the IDs of the relevant (GMF-producing) events.

    :returns: the accumulator obtained by reducing the core-task results,
        or an empty dict when nothing has to be computed
    :raises InvalidFile: if the job file defines no intensity measure types
    :raises RuntimeError: if no GMFs at all were generated
    """
    oqp = self.oqparam
    self.set_param()
    self.offset = 0
    if oqp.hazard_calculation_id:  # from ruptures
        self.datastore.parent = util.read(oqp.hazard_calculation_id)
    elif hasattr(self, 'csm'):  # from sources
        self.build_events_from_sources()
        if (oqp.ground_motion_fields is False and
                oqp.hazard_curves_from_gmfs is False):
            return {}  # only the events were requested
    elif 'rupture_model' not in oqp.inputs:  # download ShakeMap
        logging.warning(
            'There is no rupture_model, the calculator will just '
            'import data without performing any calculation')
        fake = logictree.FullLogicTree.fake()
        self.datastore['full_lt'] = fake  # needed to expose the outputs
        return {}
    else:  # scenario
        self._read_scenario_ruptures()
        if (oqp.ground_motion_fields is False and
                oqp.hazard_curves_from_gmfs is False):
            return {}

    if not oqp.imtls:
        raise InvalidFile('There are no intensity measure types in %s' %
                          oqp.inputs['job_ini'])
    num_sites = len(self.sitecol.complete)
    if oqp.ground_motion_fields:
        # create the gmf_data output datasets before spawning the workers
        num_imts = len(oqp.imtls)
        num_ruptures = len(self.datastore['ruptures'])
        base.create_gmf_data(self.datastore, num_imts,
                             self.param['sec_perils'])
        self.datastore.create_dset('gmf_data/sigma_epsilon',
                                   sig_eps_dt(oqp.imtls))
        self.datastore.create_dset('gmf_data/events_by_sid', U32,
                                   (num_sites, ))
        self.datastore.create_dset('gmf_data/time_by_rup', time_dt,
                                   (num_ruptures, ), fillvalue=None)

    # compute_gmfs in parallel
    nr = len(self.datastore['ruptures'])
    self.datastore.swmr_on()
    logging.info('Reading {:_d} ruptures'.format(nr))
    task_args = ((rgetter, self.param) for rgetter in gen_rupture_getters(
        self.datastore, oqp.concurrent_tasks))
    starmap = parallel.Starmap(self.core_task.__func__, task_args,
                               h5=self.datastore.hdf5)
    starmap.monitor.save('srcfilter', self.srcfilter)
    result = starmap.reduce(self.agg_dicts, self.acc0())
    if 'gmf_data' not in self.datastore:
        return result
    if oqp.ground_motion_fields:
        event_ids = self.datastore['gmf_data/eid'][:]
        rel_events = numpy.unique(event_ids)
        num_rel = len(rel_events)
        if num_rel == 0:
            raise RuntimeError('No GMFs were generated, perhaps they were '
                               'all below the minimum_intensity threshold')
        elif num_rel < len(self.datastore['events']):
            # only a subset of the events produced GMFs: remember which
            self.datastore['relevant_events'] = rel_events
            logging.info('Stored %d relevant event IDs', num_rel)
    return result
def execute(self):
    """
    Build or import the ruptures, then compute the ground motion fields
    in parallel with Starmap.apply_split, grouping the rupture proxies by
    'trt_smr' and weighting them by 'n_occ'.

    :returns: the accumulator obtained by reducing the core-task results,
        or an empty dict when nothing has to be computed
    """
    oqp = self.oqparam
    dstore = self.datastore
    if oqp.ground_motion_fields and oqp.min_iml.sum() == 0:
        logging.warning('The GMFs are not filtered: '
                        'you may want to set a minimum_intensity')
    else:
        logging.info('minimum_intensity=%s', oqp.minimum_intensity)
    self.offset = 0
    if oqp.hazard_calculation_id:  # from ruptures
        dstore.parent = datastore.read(oqp.hazard_calculation_id)
    elif hasattr(self, 'csm'):  # from sources
        self.build_events_from_sources()
        if (oqp.ground_motion_fields is False and
                oqp.hazard_curves_from_gmfs is False):
            return {}  # only the events were requested
    elif 'rupture_model' not in oqp.inputs:
        logging.warning(
            'There is no rupture_model, the calculator will just '
            'import data without performing any calculation')
        fake = logictree.FullLogicTree.fake()
        dstore['full_lt'] = fake  # needed to expose the outputs
        dstore['weights'] = [1.]
        return {}
    else:  # scenario
        self._read_scenario_ruptures()
        if (oqp.ground_motion_fields is False and
                oqp.hazard_curves_from_gmfs is False):
            return {}

    if oqp.ground_motion_fields:
        # create the gmf_data output datasets before spawning the workers
        primary_imts = oqp.get_primary_imtls()
        num_ruptures = len(dstore['ruptures'])
        base.create_gmf_data(dstore, primary_imts, oqp.get_sec_imts())
        dstore.create_dset('gmf_data/sigma_epsilon', sig_eps_dt(oqp.imtls))
        dstore.create_dset('gmf_data/time_by_rup', time_dt,
                           (num_ruptures, ), fillvalue=None)

    # event_based in parallel
    logging.info('Reading {:_d} ruptures'.format(len(dstore['ruptures'])))
    is_scenario = 'scenario' in oqp.calculation_mode
    proxies = [RuptureProxy(rec, is_scenario)
               for rec in dstore['ruptures'][:]]
    full_lt = self.datastore['full_lt']
    dstore.swmr_on()  # must come before the Starmap
    starmap = parallel.Starmap.apply_split(
        self.core_task.__func__, (proxies, full_lt, oqp, self.datastore),
        key=operator.itemgetter('trt_smr'),
        weight=operator.itemgetter('n_occ'),
        h5=dstore.hdf5,
        concurrent_tasks=oqp.concurrent_tasks or 1,
        duration=oqp.time_per_task,
        split_level=oqp.split_level)
    result = starmap.reduce(self.agg_dicts, self.acc0())
    if 'gmf_data' not in dstore:
        return result
    if oqp.ground_motion_fields:
        with self.monitor('saving avg_gmf', measuremem=True):
            self.save_avg_gmf()
    return result
def execute(self):
    """
    Build or import the ruptures, compute the ground motion fields in
    parallel, then store the average GMF per IMT and the IDs of the
    relevant (GMF-producing) events.

    :returns: the accumulator obtained by reducing the core-task results,
        or an empty dict when nothing has to be computed
    :raises RuntimeError: if no GMFs at all were generated
    """
    oqp = self.oqparam
    self.set_param()
    self.offset = 0
    if oqp.hazard_calculation_id:  # from ruptures
        self.datastore.parent = util.read(oqp.hazard_calculation_id)
    elif hasattr(self, 'csm'):  # from sources
        self.build_events_from_sources()
        if (oqp.ground_motion_fields is False and
                oqp.hazard_curves_from_gmfs is False):
            return {}  # only the events were requested
    elif 'rupture_model' not in oqp.inputs:  # download ShakeMap
        logging.warning(
            'There is no rupture_model, the calculator will just '
            'import data without performing any calculation')
        fake = logictree.FullLogicTree.fake()
        self.datastore['full_lt'] = fake  # needed to expose the outputs
        return {}
    else:  # scenario
        self._read_scenario_ruptures()
        if (oqp.ground_motion_fields is False and
                oqp.hazard_curves_from_gmfs is False):
            return {}

    num_sites = len(self.sitecol.complete)
    if oqp.ground_motion_fields:
        # create the gmf_data output datasets before spawning the workers
        num_imts = len(oqp.get_primary_imtls())
        num_ruptures = len(self.datastore['ruptures'])
        base.create_gmf_data(self.datastore, num_imts, oqp.get_sec_imts())
        self.datastore.create_dset('gmf_data/sigma_epsilon',
                                   sig_eps_dt(oqp.imtls))
        self.datastore.create_dset('gmf_data/events_by_sid', U32,
                                   (num_sites, ))
        self.datastore.create_dset('gmf_data/time_by_rup', time_dt,
                                   (num_ruptures, ), fillvalue=None)

    # compute_gmfs in parallel
    nr = len(self.datastore['ruptures'])
    logging.info('Reading {:_d} ruptures'.format(nr))
    task_args = [(rgetter, self.param) for rgetter in gen_rupture_getters(
        self.datastore, oqp.concurrent_tasks)]
    # building the args is fast since the ruptures are neither prefiltered
    # nor are their geometries read; a list (not an iterator) is required
    # to avoid the usual h5py error, last seen on macos
    self.datastore.swmr_on()
    starmap = parallel.Starmap(self.core_task.__func__, task_args,
                               h5=self.datastore.hdf5)
    starmap.monitor.save('srcfilter', self.srcfilter)
    result = starmap.reduce(self.agg_dicts, self.acc0())
    if 'gmf_data' not in self.datastore:
        return result
    if oqp.ground_motion_fields:
        with self.monitor('saving avg_gmf', measuremem=True):
            self.weights = self.datastore['weights'][:]
            self.rlzs = self.datastore['events']['rlz_id']
            self.num_events = numpy.bincount(self.rlzs)  # events by rlz
            avg_gmf = {imt: numpy.zeros(self.N, F32)
                       for imt in oqp.all_imts()}
            rel_events = self.save_avg_gmf(avg_gmf)
            self.datastore.create_dframe('avg_gmf', avg_gmf.items())
            num_rel = len(rel_events)
            if num_rel == 0:
                raise RuntimeError(
                    'No GMFs were generated, perhaps they were '
                    'all below the minimum_intensity threshold')
            elif num_rel < len(self.datastore['events']):
                # only a subset of the events produced GMFs: remember which
                self.datastore['relevant_events'] = rel_events
                logging.info('Stored %d relevant event IDs', num_rel)
    return result
def run_calc(self):
    """
    Run a calculation and return results (reinvented from openquake.calculators.base)

    Replays by hand the setup phase of the OpenQuake event-based
    calculator: pre_execute, rupture import/creation, creation of the
    gmf_data datasets and construction of the (rupture_getter, param)
    task arguments.  The arguments, a ZMQ-backed Monitor and the
    datastore are stored on self.args / self.mon / self.dstore for
    later execution; an empty dict is returned when there is nothing
    to compute.
    """
    with self.calculator._monitor:
        self.calculator._monitor.username = ''
        try:
            # Pre-execute setups
            self.calculator.pre_execute()

            #self.calculator.datastore.swmr_on()
            oq = self.calculator.oqparam
            dstore = self.calculator.datastore
            self.calculator.set_param()
            self.calculator.offset = 0

            # Source model
            print('self.__dict__ = ')
            print(self.calculator.__dict__)
            if oq.hazard_calculation_id:  # from ruptures
                # NOTE(review): this calls .read on the datastore
                # *instance*; presumably the module-level datastore.read
                # was intended — confirm against the OpenQuake API
                dstore.parent = self.calculator.datastore.read(
                    oq.hazard_calculation_id)
            elif hasattr(self.calculator, 'csm'):  # from sources
                # local re-implementation of the calculator's own method
                self.calculator_build_events_from_sources()
                #self.calculator.build_events_from_sources()
                if (oq.ground_motion_fields is False and
                        oq.hazard_curves_from_gmfs is False):
                    return {}
            elif 'rupture_model' not in oq.inputs:
                logging.warning(
                    'There is no rupture_model, the calculator will just '
                    'import data without performing any calculation')
                fake = logictree.FullLogicTree.fake()
                dstore['full_lt'] = fake  # needed to expose the outputs
                dstore['weights'] = [1.]
                return {}
            else:  # scenario
                self.calculator._read_scenario_ruptures()
                if (oq.ground_motion_fields is False and
                        oq.hazard_curves_from_gmfs is False):
                    return {}

            # Intensity measure models
            if oq.ground_motion_fields:
                # create the gmf_data output datasets up front
                imts = oq.get_primary_imtls()
                nrups = len(dstore['ruptures'])
                base.create_gmf_data(dstore, imts, oq.get_sec_imts())
                dstore.create_dset('gmf_data/sigma_epsilon',
                                   getters.sig_eps_dt(oq.imtls))
                dstore.create_dset('gmf_data/time_by_rup',
                                   getters.time_dt, (nrups, ),
                                   fillvalue=None)

            # Prepare inputs for GmfGetter
            nr = len(dstore['ruptures'])
            logging.info('Reading {:_d} ruptures'.format(nr))
            rgetters = getters.get_rupture_getters(
                dstore, oq.concurrent_tasks * 1.25,
                srcfilter=self.calculator.srcfilter)
            args = [(rgetter, self.calculator.param)
                    for rgetter in rgetters]

            # build a Monitor wired to the dbserver's ZMQ receiver so
            # that the tasks can report back
            mon = performance.Monitor()
            mon.version = version
            mon.config = config
            rcvr = 'tcp://%s:%s' % (config.dbserver.listen,
                                    config.dbserver.receiver_ports)
            # NOTE(review): the socket context is entered manually and
            # never exited in this method — confirm it is closed later
            skt = zeromq.Socket(rcvr, zeromq.zmq.PULL, 'bind').__enter__()
            mon.backurl = 'tcp://%s:%s' % (config.dbserver.host, skt.port)
            mon = mon.new(
                operation='total ' +
                self.calculator.core_task.__func__.__name__,
                measuremem=True)
            # args[0] is a tuple, so getattr always falls back to 1. here
            mon.weight = getattr(args[0], 'weight', 1.)  # used in task_info
            mon.task_no = 1  # initialize the task number
            # list += tuple extends the list, appending the monitor
            args += (mon, )
            self.args = args
            self.mon = mon
            self.dstore = dstore
        finally:
            print('FetchOpenQuake: OpenQuake Hazard Calculator defined.')