def build_ruptures(sources, src_filter, param, monitor):
    """
    Sample ruptures from a single UCERF source over all SES/sample pairs.

    :param sources: a list with a single UCERF source
    :param src_filter: a SourceFilter (provides sitecol and background sids)
    :param param: dict of extra parameters; 'ses_seeds' is a list of
        (ses_idx, ses_seed) pairs
    :param monitor: a Monitor instance
    :returns: a dict with keys 'eff_ruptures', 'rup_array', 'calc_times'
    """
    # NB: removed dead locals (res, res.calc_times, res.trt) that were
    # assigned but never used — the function returns `dic` instead
    [src] = sources  # exactly one UCERF source is expected
    sampl_mon = monitor('sampling ruptures', measuremem=True)
    background_sids = src.get_background_sids(src_filter)
    samples = getattr(src, 'samples', 1)
    n_occ = AccumDict(accum=0)  # rupture -> number of occurrences
    t0 = time.time()
    with sampl_mon:
        for sam_idx in range(samples):
            for ses_idx, ses_seed in param['ses_seeds']:
                # seed unique per (sample, SES) combination
                seed = sam_idx * TWO16 + ses_seed
                rups, occs = generate_event_set(
                    src, background_sids, src_filter, ses_idx, seed)
                for rup, occ in zip(rups, occs):
                    n_occ[rup] += occ
    tot_occ = sum(n_occ.values())
    dic = {'eff_ruptures': {src.src_group_id: src.num_ruptures}}
    eb_ruptures = [EBRupture(rup, src.id, src.src_group_id, n, samples)
                   for rup, n in n_occ.items()]
    dic['rup_array'] = stochastic.get_rup_array(eb_ruptures, src_filter)
    dt = time.time() - t0
    n = len(src_filter.sitecol)
    # calc_times entry: total occurrences, number of sites, elapsed time
    dic['calc_times'] = {src.id: numpy.array([tot_occ, n, dt], F32)}
    return dic
def build_ruptures(sources, src_filter, param, monitor):
    """
    Sample ruptures from a single UCERF source over all SES/sample pairs.

    :param sources: a list with a single UCERF source
    :param src_filter: a SourceFilter (provides background site IDs)
    :param param: dict of extra parameters; 'ses_seeds' is a list of
        (ses_idx, ses_seed) pairs
    :param monitor: a Monitor instance
    :returns: a dict with keys 'eff_ruptures', 'rup_array', 'calc_times'
    """
    # NB: removed dead locals (res, res.calc_times, res.trt) that were
    # assigned but never used — the function returns `dic` instead
    [src] = sources  # exactly one UCERF source is expected
    sampl_mon = monitor('sampling ruptures', measuremem=True)
    background_sids = src.get_background_sids(src_filter)
    samples = getattr(src, 'samples', 1)
    n_occ = AccumDict(accum=0)  # rupture -> number of occurrences
    t0 = time.time()
    with sampl_mon:
        for sam_idx in range(samples):
            for ses_idx, ses_seed in param['ses_seeds']:
                # seed unique per (sample, SES) combination
                seed = sam_idx * TWO16 + ses_seed
                rups, occs = generate_event_set(
                    src, background_sids, src_filter, ses_idx, seed)
                for rup, occ in zip(rups, occs):
                    n_occ[rup] += occ
    tot_occ = sum(n_occ.values())
    dic = {'eff_ruptures': {src.src_group_id: src.num_ruptures}}
    eb_ruptures = [EBRupture(rup, src.id, src.src_group_id, n, samples)
                   for rup, n in n_occ.items()]
    dic['rup_array'] = stochastic.get_rup_array(eb_ruptures, src_filter)
    dt = time.time() - t0
    # calc_times entry: total occurrences and elapsed time
    dic['calc_times'] = {src.id: numpy.array([tot_occ, dt], F32)}
    return dic
def pre_execute(self):
    """
    Read the site collection and initialize GmfComputer and seeds
    """
    oq = self.oqparam
    # fake composition info built from the GSIM logic tree only
    cinfo = source.CompositionInfo.fake(readinput.get_gsim_lt(oq))
    self.datastore['csm_info'] = cinfo
    if 'rupture_model' not in oq.inputs:
        # logging.warn is a deprecated alias (removed in Python 3.13);
        # use logging.warning as in the other pre_execute implementation
        logging.warning('There is no rupture_model, the calculator will just '
                        'import data without performing any calculation')
        super().pre_execute()
        return
    self.rup = readinput.get_rupture(oq)
    self.gsims = readinput.get_gsims(oq)
    R = len(self.gsims)  # number of realizations, one per GSIM
    self.cmaker = ContextMaker(self.gsims, oq.maximum_distance,
                               {'filter_distance': oq.filter_distance})
    super().pre_execute()
    self.datastore['oqparam'] = oq
    self.rlzs_assoc = cinfo.get_rlzs_assoc()
    rlzs_by_gsim = self.rlzs_assoc.get_rlzs_by_gsim(0)
    E = oq.number_of_ground_motion_fields
    n_occ = numpy.array([E])
    ebr = EBRupture(self.rup, 0, 0, self.sitecol.sids, n_occ)
    # build E events per realization, laid out contiguously per rlz
    events = numpy.zeros(E * R, events_dt)
    for rlz, eids in ebr.get_eids_by_rlz(rlzs_by_gsim).items():
        events[rlz * E:rlz * E + E]['eid'] = eids
        events[rlz * E:rlz * E + E]['rlz'] = rlz
    self.datastore['events'] = events
    rupser = calc.RuptureSerializer(self.datastore)
    rupser.save(get_rup_array([ebr]))
    rupser.close()
    self.computer = GmfComputer(ebr, self.sitecol, oq.imtls, self.cmaker,
                                oq.truncation_level, oq.correl_model)
def _read_scenario_ruptures(self):
    """
    Build the rupture array from an XML rupture model or a CSV of
    precomputed ruptures, store the geometries, and import the events.
    """
    oq = self.oqparam
    gsim_lt = readinput.get_gsim_lt(self.oqparam)
    G = gsim_lt.get_num_paths()  # number of GSIM logic tree paths
    if oq.calculation_mode.startswith('scenario'):
        ngmfs = oq.number_of_ground_motion_fields
    if oq.inputs['rupture_model'].endswith('.xml'):
        self.gsims = [gsim_rlz.value[0] for gsim_rlz in gsim_lt]
        self.cmaker = ContextMaker('*', self.gsims, {
            'maximum_distance': oq.maximum_distance,
            'imtls': oq.imtls
        })
        rup = readinput.get_rupture(oq)
        if self.N > oq.max_sites_disagg:  # many sites, split rupture
            # one EBRupture per GMF, each with a distinct rup_id and
            # event offset e0 so event IDs do not collide
            ebrs = [
                EBRupture(copyobj(rup, rup_id=rup.rup_id + i),
                          0, 0, G, e0=i * G)
                for i in range(ngmfs)
            ]
        else:  # keep a single rupture with a big occupation number
            ebrs = [EBRupture(rup, 0, 0, G * ngmfs, rup.rup_id)]
        aw = get_rup_array(ebrs, self.srcfilter)
        if len(aw) == 0:
            raise RuntimeError(
                'The rupture is too far from the sites! Please check the '
                'maximum_distance and the position of the rupture')
    elif oq.inputs['rupture_model'].endswith('.csv'):
        aw = readinput.get_ruptures(oq.inputs['rupture_model'])
        # number of GSIMs per tectonic region type, indexed by trt_smr
        num_gsims = numpy.array(
            [len(gsim_lt.values[trt]) for trt in gsim_lt.values], U32)
        if oq.calculation_mode.startswith('scenario'):
            # rescale n_occ
            aw['n_occ'] *= ngmfs * num_gsims[aw['trt_smr']]
    rup_array = aw.array
    hdf5.extend(self.datastore['rupgeoms'], aw.geom)
    if len(rup_array) == 0:
        # NOTE(review): `rup` is only bound on the .xml branch; reaching
        # this message after a .csv input would raise NameError — confirm
        raise RuntimeError(
            'There are no sites within the maximum_distance'
            ' of %s km from the rupture' %
            oq.maximum_distance(rup.tectonic_region_type, rup.mag))
    # check the number of branchsets
    branchsets = len(gsim_lt._ltnode)
    if len(rup_array) == 1 and branchsets > 1:
        raise InvalidFile(
            '%s for a scenario calculation must contain a single '
            'branchset, found %d!' % (oq.inputs['job_ini'], branchsets))
    # store a fake full logic tree built from the GSIM logic tree only
    fake = logictree.FullLogicTree.fake(gsim_lt)
    self.realizations = fake.get_realizations()
    self.datastore['full_lt'] = fake
    self.store_rlz_info({})  # store weights
    self.save_params()
    imp = calc.RuptureImporter(self.datastore)
    imp.import_rups_events(rup_array, get_rupture_getters)
def _read_scenario_ruptures(self):
    """
    Build the rupture array from an XML rupture model or a CSV of
    precomputed ruptures, store the geometries, and import the ruptures.
    """
    oq = self.oqparam
    gsim_lt = readinput.get_gsim_lt(self.oqparam)
    G = gsim_lt.get_num_paths()  # number of GSIM logic tree paths
    if oq.inputs['rupture_model'].endswith('.xml'):
        ngmfs = oq.number_of_ground_motion_fields
        self.gsims = readinput.get_gsims(oq)
        self.cmaker = ContextMaker('*', self.gsims, {
            'maximum_distance': oq.maximum_distance,
            'imtls': oq.imtls
        })
        rup = readinput.get_rupture(oq)
        # flatten the rupture surface mesh into a 1D geometry array
        mesh = surface_to_array(rup.surface).transpose(1, 2, 0).flatten()
        if self.N > oq.max_sites_disagg:  # many sites, split rupture
            # one EBRupture per GMF with distinct rup_id and event offset
            ebrs = [
                EBRupture(copyobj(rup, rup_id=rup.rup_id + i),
                          0, 0, G, e0=i * G)
                for i in range(ngmfs)
            ]
            meshes = numpy.array([mesh] * ngmfs, object)
        else:  # keep a single rupture with a big occupation number
            ebrs = [EBRupture(rup, 0, 0, G * ngmfs, rup.rup_id)]
            # NOTE(review): ngmfs mesh copies are stored here for a single
            # rupture — looks like it should be [mesh] once; confirm
            meshes = numpy.array([mesh] * ngmfs, object)
        rup_array = get_rup_array(ebrs, self.srcfilter).array
        hdf5.extend(self.datastore['rupgeoms'], meshes)
    elif oq.inputs['rupture_model'].endswith('.csv'):
        aw = readinput.get_ruptures(oq.inputs['rupture_model'])
        aw.array['n_occ'] = G  # one occurrence per GSIM path
        rup_array = aw.array
        hdf5.extend(self.datastore['rupgeoms'], aw.geom)
    if len(rup_array) == 0:
        # NOTE(review): `rup` is only bound on the .xml branch; reaching
        # this message after a .csv input would raise NameError — confirm
        raise RuntimeError(
            'There are no sites within the maximum_distance'
            ' of %s km from the rupture' %
            oq.maximum_distance(rup.tectonic_region_type, rup.mag))
    # check the number of branchsets
    branchsets = len(gsim_lt._ltnode)
    if len(rup_array) == 1 and branchsets > 1:
        raise InvalidFile(
            '%s for a scenario calculation must contain a single '
            'branchset, found %d!' % (oq.inputs['job_ini'], branchsets))
    # store a fake full logic tree built from the GSIM logic tree only
    fake = logictree.FullLogicTree.fake(gsim_lt)
    self.realizations = fake.get_realizations()
    self.datastore['full_lt'] = fake
    self.store_rlz_info({})  # store weights
    self.save_params()
    calc.RuptureImporter(self.datastore).import_rups(rup_array)
def pre_execute(self):
    """
    Read the site collection and initialize GmfComputer and seeds
    """
    oq = self.oqparam
    # fake full logic tree built from the GSIM logic tree only
    cinfo = logictree.FullLogicTree.fake(readinput.get_gsim_lt(oq))
    self.realizations = cinfo.get_realizations()
    self.datastore['full_lt'] = cinfo
    if 'rupture_model' not in oq.inputs:
        # nothing to compute: just import the data and return
        logging.warning(
            'There is no rupture_model, the calculator will just '
            'import data without performing any calculation')
        super().pre_execute()
        return
    self.rup = readinput.get_rupture(oq)
    self.gsims = readinput.get_gsims(oq)
    R = len(self.gsims)  # number of realizations, one per GSIM
    self.cmaker = ContextMaker(
        '*', self.gsims, {
            'maximum_distance': oq.maximum_distance,
            'filter_distance': oq.filter_distance
        })
    super().pre_execute()
    self.datastore['oqparam'] = oq
    self.store_rlz_info({})
    rlzs_by_gsim = cinfo.get_rlzs_by_gsim(0)
    E = oq.number_of_ground_motion_fields
    n_occ = numpy.array([E])
    ebr = EBRupture(self.rup, 0, 0, n_occ)
    ebr.e0 = 0  # event IDs start at zero
    # build E events per realization, laid out contiguously per rlz
    events = numpy.zeros(E * R, events_dt)
    for rlz, eids in ebr.get_eids_by_rlz(rlzs_by_gsim).items():
        events[rlz * E:rlz * E + E]['id'] = eids
        events[rlz * E:rlz * E + E]['rlz_id'] = rlz
    self.datastore['events'] = self.events = events
    rupser = calc.RuptureSerializer(self.datastore)
    rup_array = get_rup_array([ebr], self.src_filter())
    if len(rup_array) == 0:
        # the rupture was filtered out: no site is close enough
        maxdist = oq.maximum_distance(self.rup.tectonic_region_type,
                                      self.rup.mag)
        raise RuntimeError('There are no sites within the maximum_distance'
                           ' of %s km from the rupture' % maxdist)
    rupser.save(rup_array)
    rupser.close()
    self.computer = GmfComputer(ebr, self.sitecol, oq.imtls, self.cmaker,
                                oq.truncation_level, oq.correl_model,
                                self.amplifier)
    # dtype for the (signal, epsilon) pairs, one float32 per IMT
    M32 = (numpy.float32, len(self.oqparam.imtls))
    self.sig_eps_dt = [('eid', numpy.uint64), ('sig', M32), ('eps', M32)]
def _read_scenario_ruptures(self):
    """
    Build the rupture array from a rupture model file (.xml/.toml/.txt)
    or a CSV of precomputed ruptures, and import the ruptures.
    """
    oq = self.oqparam
    if oq.inputs['rupture_model'].endswith(('.xml', '.toml', '.txt')):
        self.gsims = readinput.get_gsims(oq)
        self.cmaker = ContextMaker(
            '*', self.gsims,
            {'maximum_distance': oq.maximum_distance,
             'filter_distance': oq.filter_distance})
        n_occ = numpy.array([oq.number_of_ground_motion_fields])
        rup = readinput.get_rupture(oq)
        ebr = EBRupture(rup, 0, 0, n_occ)
        ebr.e0 = 0  # event IDs start at zero
        rup_array = get_rup_array([ebr], self.srcfilter).array
        # flatten the rupture surface mesh into a 1D geometry array
        mesh = surface_to_array(rup.surface).transpose(1, 2, 0).flatten()
        hdf5.extend(self.datastore['rupgeoms'],
                    numpy.array([mesh], object))
    elif oq.inputs['rupture_model'].endswith('.csv'):
        aw = readinput.get_ruptures(oq.inputs['rupture_model'])
        rup_array = aw.array
        hdf5.extend(self.datastore['rupgeoms'], aw.geom)
    if len(rup_array) == 0:
        # NOTE(review): `rup` is only bound on the first branch; reaching
        # this message after a .csv input would raise NameError — confirm
        raise RuntimeError(
            'There are no sites within the maximum_distance'
            ' of %s km from the rupture' % oq.maximum_distance(
                rup.tectonic_region_type, rup.mag))
    gsim_lt = readinput.get_gsim_lt(self.oqparam)
    # check the number of branchsets
    branchsets = len(gsim_lt._ltnode)
    if len(rup_array) == 1 and branchsets > 1:
        raise InvalidFile(
            '%s for a scenario calculation must contain a single '
            'branchset, found %d!' % (oq.inputs['job_ini'], branchsets))
    # store a fake full logic tree built from the GSIM logic tree only
    fake = logictree.FullLogicTree.fake(gsim_lt)
    self.realizations = fake.get_realizations()
    self.datastore['full_lt'] = fake
    self.store_rlz_info({})  # store weights
    self.save_params()
    calc.RuptureImporter(self.datastore).import_rups(rup_array)
def build_ruptures(sources, src_filter, param, monitor):
    """
    Sample and filter ruptures from a single UCERF source.

    :param sources: a list with a single UCERF source
    :param src_filter: a SourceFilter instance
    :param param: dict of extra parameters; uses 'gsims',
        'ses_per_logic_tree_path' and 'ses_seeds'
    :param monitor: a Monitor instance
    :returns: a dict with keys 'eff_ruptures', 'rup_array', 'calc_times'
    """
    # NB: removed dead locals (res, res.calc_times, res.trt) that were
    # assigned but never used — the function returns `dic` instead
    [src] = sources  # exactly one UCERF source is expected
    sampl_mon = monitor('sampling ruptures', measuremem=True)
    filt_mon = monitor('filtering ruptures', measuremem=False)
    background_sids = src.get_background_sids(src_filter)
    sitecol = src_filter.sitecol
    cmaker = ContextMaker(param['gsims'], src_filter.integration_distance)
    num_ses = param['ses_per_logic_tree_path']
    samples = getattr(src, 'samples', 1)
    n_occ = AccumDict(accum=0)  # rupture -> number of occurrences
    t0 = time.time()
    with sampl_mon:
        for sam_idx in range(samples):
            for ses_idx, ses_seed in param['ses_seeds']:
                # seed unique per (sample, SES) combination
                seed = sam_idx * TWO16 + ses_seed
                rups, occs = generate_event_set(
                    src, background_sids, src_filter, ses_idx, seed)
                for rup, occ in zip(rups, occs):
                    n_occ[rup] += occ
    tot_occ = sum(n_occ.values())
    dic = {'eff_ruptures': {src.src_group_id: src.num_ruptures}}
    with filt_mon:
        eb_ruptures = stochastic.build_eb_ruptures(
            src, num_ses, cmaker, sitecol, n_occ.items())
    # empty tuple when everything was filtered out
    dic['rup_array'] = (stochastic.get_rup_array(eb_ruptures)
                        if eb_ruptures else ())
    dt = time.time() - t0
    # calc_times entry: total occurrences, number of sites, elapsed time
    dic['calc_times'] = {
        src.id: numpy.array([tot_occ, len(sitecol), dt], F32)}
    return dic
def _read_scenario_ruptures(self):
    """
    Build the rupture array from an XML rupture model or a CSV of
    precomputed ruptures, store the geometries, and import the events.
    """
    # NB: rejoined the error message string that was broken across lines
    # ('The rupture is too far from the sites! Please check the ...'),
    # which was a syntax error as extracted
    oq = self.oqparam
    gsim_lt = readinput.get_gsim_lt(self.oqparam)
    G = gsim_lt.get_num_paths()  # number of GSIM logic tree paths
    if oq.calculation_mode.startswith('scenario'):
        ngmfs = oq.number_of_ground_motion_fields
    if oq.inputs['rupture_model'].endswith('.xml'):
        # check the number of branchsets
        bsets = len(gsim_lt._ltnode)
        if bsets > 1:
            raise InvalidFile(
                '%s for a scenario calculation must contain a single '
                'branchset, found %d!' % (oq.inputs['job_ini'], bsets))
        [(trt, rlzs_by_gsim)] = gsim_lt.get_rlzs_by_gsim_trt().items()
        self.cmaker = ContextMaker(
            trt, rlzs_by_gsim, {
                'maximum_distance': oq.maximum_distance(trt),
                'minimum_distance': oq.minimum_distance,
                'truncation_level': oq.truncation_level,
                'imtls': oq.imtls
            })
        rup = readinput.get_rupture(oq)
        if self.N > oq.max_sites_disagg:  # many sites, split rupture
            # one EBRupture per GMF with distinct rup_id and event offset
            ebrs = [
                EBRupture(copyobj(rup, rup_id=rup.rup_id + i),
                          'NA', 0, G, e0=i * G, scenario=True)
                for i in range(ngmfs)
            ]
        else:  # keep a single rupture with a big occupation number
            ebrs = [
                EBRupture(rup, 'NA', 0, G * ngmfs, rup.rup_id,
                          scenario=True)
            ]
        srcfilter = SourceFilter(self.sitecol, oq.maximum_distance(trt))
        aw = get_rup_array(ebrs, srcfilter)
        if len(aw) == 0:
            raise RuntimeError(
                'The rupture is too far from the sites! Please check the '
                'maximum_distance and the position of the rupture')
    elif oq.inputs['rupture_model'].endswith('.csv'):
        aw = get_ruptures(oq.inputs['rupture_model'])
        if len(gsim_lt.values) == 1:  # fix for scenario_damage/case_12
            aw['trt_smr'] = 0  # a single TRT
        if oq.calculation_mode.startswith('scenario'):
            # rescale n_occ by ngmfs and nrlzs
            aw['n_occ'] *= ngmfs * gsim_lt.get_num_paths()
    else:
        raise InvalidFile("Something wrong in %s" % oq.inputs['job_ini'])
    rup_array = aw.array
    hdf5.extend(self.datastore['rupgeoms'], aw.geom)
    if len(rup_array) == 0:
        # NOTE(review): `rup` is only bound on the .xml branch; reaching
        # this message after a .csv input would raise NameError — confirm
        raise RuntimeError(
            'There are no sites within the maximum_distance'
            ' of %s km from the rupture' %
            oq.maximum_distance(rup.tectonic_region_type)(rup.mag))
    # store a fake full logic tree built from the GSIM logic tree only
    fake = logictree.FullLogicTree.fake(gsim_lt)
    self.realizations = fake.get_realizations()
    self.datastore['full_lt'] = fake
    self.store_rlz_info({})  # store weights
    self.save_params()
    imp = calc.RuptureImporter(self.datastore)
    imp.import_rups_events(rup_array, get_rupture_getters)