def pre_execute(self):
    """
    Read the site collection and initialize GmfComputer and seeds
    """
    super(ScenarioCalculator, self).pre_execute()
    oq = self.oqparam
    trunc_level = oq.truncation_level
    correl_model = oq.get_correl_model()
    rup = readinput.get_rupture(oq)
    # the rupture carries its own seed, taken from the job configuration
    rup.seed = self.oqparam.random_seed
    self.gsims = readinput.get_gsims(oq)
    maxdist = oq.maximum_distance['default']
    with self.monitor('filtering sites', autoflush=True):
        self.sitecol = filters.filter_sites_by_distance_to_rupture(
            rup, maxdist, self.sitecol)
    if self.sitecol is None:
        # the filter returns None when no site is within maxdist
        raise RuntimeError(
            'All sites were filtered out! maximum_distance=%s km'
            % maxdist)
    # build one event record per requested GMF; the tuple fields are
    # eid, ses, occ, sample
    events = numpy.array(
        [(eid, 1, 1, 0)
         for eid in range(oq.number_of_ground_motion_fields)],
        calc.event_dt)
    rupture = calc.EBRupture(
        rup, self.sitecol.sids, events, 'single_rupture', 0, 0)
    # store the single event-based rupture under the fake group 00
    self.datastore['ruptures/grp-00/0'] = rupture
    self.computer = GmfComputer(
        rupture, self.sitecol, oq.imtls, self.gsims, trunc_level,
        correl_model)
    # build a fake composition info from the GSIM logic tree, so that
    # the realization association machinery works for scenarios too
    gsim_lt = readinput.get_gsim_lt(oq)
    cinfo = source.CompositionInfo.fake(gsim_lt)
    self.datastore['csm_info'] = cinfo
    self.rlzs_assoc = cinfo.get_rlzs_assoc()
def test_495_km(self):
    """An integration distance of 495 km keeps only the 3 nearest sites."""
    rupture = self._make_rupture(7, 10, 30)
    # the JB distances are [5.84700762, 6.8290327, 14.53519629,
    # 496.25926891, 497.37116174], so with an integration distance
    # of 495 only the first 3 sites survive the filtering
    result = filters.filter_sites_by_distance_to_rupture(
        rupture, integration_distance=495, sites=self.sitecol)
    expected = SiteCollection(self.SITES[:3])
    numpy.testing.assert_array_equal(result.indices, [0, 1, 2])
    # the site parameters of the filtered collection must match
    for attr in ('vs30', 'vs30measured', 'z1pt0', 'z2pt5'):
        numpy.testing.assert_array_equal(
            getattr(result, attr), getattr(expected, attr))
    # and so must the mesh coordinates
    for attr in ('lons', 'lats', 'depths'):
        numpy.testing.assert_array_equal(
            getattr(result.mesh, attr), getattr(expected.mesh, attr))
def gen_ruptures(sources, site_coll, maximum_distance, monitor):
    """
    Yield (source, rupture, affected_sites) for each rupture generated
    by the given sources.

    :param sources: a sequence of sources
    :param site_coll: a SiteCollection instance
    :param maximum_distance: the maximum distance
    :param monitor: a Monitor object
    """
    # three child monitors, one per phase, flushed at the end
    filtsources_mon = monitor('filtering sources')
    genruptures_mon = monitor('generating ruptures')
    filtruptures_mon = monitor('filtering ruptures')
    for src in sources:
        with filtsources_mon:
            # discard sources too far from every site
            s_sites = src.filter_sites_by_distance_to_source(
                maximum_distance, site_coll)
            if s_sites is None:
                continue
        with genruptures_mon:
            # materialize the ruptures; sources with none are skipped
            ruptures = list(src.iter_ruptures())
            if not ruptures:
                continue
        for rupture in ruptures:
            with filtruptures_mon:
                # discard ruptures too far from the surviving sites
                r_sites = filters.filter_sites_by_distance_to_rupture(
                    rupture, maximum_distance, s_sites)
                if r_sites is None:
                    continue
            # yield outside the monitor, so consumer time is not counted
            yield SourceRuptureSites(src, rupture, r_sites)
    filtsources_mon.flush()
    genruptures_mon.flush()
    filtruptures_mon.flush()
def build_ses_ruptures(src, num_occ_by_rup, s_sites, maximum_distance,
                       sitecol):
    """
    Filter the ruptures stored in the dictionary num_occ_by_rup and
    yield pairs (rupture, <list of associated SESRuptures>)
    """
    # a single RNG seeded from the source, so the seeds below are
    # reproducible; the sorted iterations keep the call order stable
    rnd = random.Random(src.seed)
    for rup in sorted(num_occ_by_rup, key=operator.attrgetter('rup_no')):
        # filtering ruptures
        r_sites = filter_sites_by_distance_to_rupture(
            rup, maximum_distance, s_sites)
        if r_sites is None:  # ignore ruptures which are far away
            # NB: safe because sorted() already materialized the keys
            del num_occ_by_rup[rup]  # save memory
            continue
        indices = r_sites.indices if len(r_sites) < len(sitecol) \
            else None  # None means that nothing was filtered
        # creating SESRuptures
        sesruptures = []
        for (col_id, ses_idx), num_occ in sorted(
                num_occ_by_rup[rup].items()):
            for occ_no in range(1, num_occ + 1):
                seed = rnd.randint(0, MAX_INT)
                # unique tag identifying collection, ses, source,
                # rupture and occurrence
                tag = 'col=%02d~ses=%04d~src=%s~rup=%03d-%02d' % (
                    col_id, ses_idx, src.source_id, rup.rup_no, occ_no)
                sesruptures.append(
                    SESRupture(rup, indices, seed, tag, col_id))
        if sesruptures:
            yield rup, sesruptures
def pre_execute(self):
    """
    Read the site collection and initialize GmfComputer, etags and seeds
    """
    super(ScenarioCalculator, self).pre_execute()
    trunc_level = self.oqparam.truncation_level
    correl_model = readinput.get_correl_model(self.oqparam)
    n_gmfs = self.oqparam.number_of_ground_motion_fields
    rupture = readinput.get_rupture(self.oqparam)
    self.gsims = readinput.get_gsims(self.oqparam)
    self.rlzs_assoc = readinput.get_rlzs_assoc(self.oqparam)
    maxdist = self.oqparam.maximum_distance['default']
    with self.monitor('filtering sites', autoflush=True):
        self.sitecol = filters.filter_sites_by_distance_to_rupture(
            rupture, maxdist, self.sitecol)
    if self.sitecol is None:
        # the filter returns None when no site is within maxdist
        raise RuntimeError(
            'All sites were filtered out! maximum_distance=%s km'
            % maxdist)
    # one fixed-width byte tag per ground motion field
    self.etags = numpy.array(
        sorted(['scenario-%010d' % i for i in range(n_gmfs)]),
        (bytes, 100))
    self.computer = GmfComputer(
        rupture, self.sitecol, self.oqparam.imtls, self.gsims,
        trunc_level, correl_model)
def pre_execute(self):
    """
    Read the site collection and initialize GmfComputer, etags and seeds
    """
    super(ScenarioCalculator, self).pre_execute()
    oq = self.oqparam
    trunc_level = oq.truncation_level
    correl_model = readinput.get_correl_model(oq)
    n_gmfs = oq.number_of_ground_motion_fields
    rupture = readinput.get_rupture(oq)
    self.gsims = readinput.get_gsims(oq)
    maxdist = oq.maximum_distance['default']
    with self.monitor('filtering sites', autoflush=True):
        self.sitecol = filters.filter_sites_by_distance_to_rupture(
            rupture, maxdist, self.sitecol)
    if self.sitecol is None:
        # the filter returns None when no site is within maxdist
        raise RuntimeError(
            'All sites were filtered out! maximum_distance=%s km'
            % maxdist)
    # one fixed-width byte tag per ground motion field; the '~ses=1'
    # suffix mimics event-based tags (single fake SES)
    self.etags = numpy.array(
        sorted(['scenario-%010d~ses=1' % i for i in range(n_gmfs)]),
        (bytes, 100))
    self.computer = GmfComputer(rupture, self.sitecol, oq.imtls,
                                self.gsims, trunc_level, correl_model)
    # build a fake composition info from the GSIM logic tree, so that
    # the realization association machinery works for scenarios too
    gsim_lt = readinput.get_gsim_lt(oq)
    cinfo = source.CompositionInfo.fake(gsim_lt)
    self.datastore['csm_info'] = cinfo
    self.rlzs_assoc = cinfo.get_rlzs_assoc()
def pre_execute(self):
    """
    Read the site collection and initialize GmfComputer, tags and seeds
    """
    super(ScenarioCalculator, self).pre_execute()
    trunc_level = self.oqparam.truncation_level
    correl_model = readinput.get_correl_model(self.oqparam)
    n_gmfs = self.oqparam.number_of_ground_motion_fields
    rupture = readinput.get_rupture(self.oqparam)
    self.gsims = readinput.get_gsims(self.oqparam)
    self.rlzs_assoc = readinput.get_rlzs_assoc(self.oqparam)
    # filter the sites
    self.sitecol = filters.filter_sites_by_distance_to_rupture(
        rupture, self.oqparam.maximum_distance, self.sitecol)
    if self.sitecol is None:
        # the filter returns None when no site is close enough
        raise RuntimeError('All sites were filtered out! '
                           'maximum_distance=%s km' %
                           self.oqparam.maximum_distance)
    # one fixed-width byte tag per ground motion field
    self.tags = numpy.array(
        sorted(['scenario-%010d' % i for i in range(n_gmfs)]),
        (bytes, 100))
    self.computer = GmfComputer(rupture, self.sitecol,
                                self.oqparam.imtls, self.gsims,
                                trunc_level, correl_model)
    # derive one reproducible seed per tag from the master seed
    rnd = random.Random(self.oqparam.random_seed)
    self.tag_seed_pairs = [(tag, rnd.randint(0, calc.MAX_INT))
                           for tag in self.tags]
    # a single fake SES collection mapping tag -> Rupture
    self.sescollection = [{
        tag: Rupture(tag, seed, rupture)
        for tag, seed in self.tag_seed_pairs}]
def gen_ruptures(sources, site_coll, maximum_distance, monitor):
    """
    Yield (source, rupture, affected_sites) for each rupture generated
    by the given sources.

    :param sources: a sequence of sources
    :param site_coll: a SiteCollection instance
    :param maximum_distance: the maximum distance
    :param monitor: a Monitor object
    """
    # three child monitors, one per phase
    # NOTE(review): unlike the sibling variant, these monitors are never
    # flushed here — presumably the caller flushes them; confirm
    filtsources_mon = monitor('filtering sources')
    genruptures_mon = monitor('generating ruptures')
    filtruptures_mon = monitor('filtering ruptures')
    for src in sources:
        with filtsources_mon:
            # discard sources too far from every site
            s_sites = src.filter_sites_by_distance_to_source(
                maximum_distance, site_coll)
            if s_sites is None:
                continue
        with genruptures_mon:
            # materialize the ruptures; sources with none are skipped
            ruptures = list(src.iter_ruptures())
            if not ruptures:
                continue
        for rupture in ruptures:
            with filtruptures_mon:
                # discard ruptures too far from the surviving sites
                r_sites = filters.filter_sites_by_distance_to_rupture(
                    rupture, maximum_distance, s_sites)
                if r_sites is None:
                    continue
            # yield outside the monitor, so consumer time is not counted
            yield SourceRuptureSites(src, rupture, r_sites)
def test_495_km(self):
    """With a 495 km integration distance only the 3 closest sites remain."""
    rupture = self._make_rupture(7, 10, 30)
    # the JB distances are [5.84700762, 6.8290327, 14.53519629,
    # 496.25926891, 497.37116174]; 495 km excludes the last two sites
    kept = filters.filter_sites_by_distance_to_rupture(
        rupture, integration_distance=495, sites=self.sitecol)
    reference = SiteCollection(self.SITES[:3])
    numpy.testing.assert_array_equal(kept.indices, [0, 1, 2])
    # pair up the actual and expected arrays and compare them in order
    checks = [
        (kept.vs30, reference.vs30),
        (kept.vs30measured, reference.vs30measured),
        (kept.z1pt0, reference.z1pt0),
        (kept.z2pt5, reference.z2pt5),
        (kept.mesh.lons, reference.mesh.lons),
        (kept.mesh.lats, reference.mesh.lats),
        (kept.mesh.depths, reference.mesh.depths),
    ]
    for actual, expected in checks:
        numpy.testing.assert_array_equal(actual, expected)
def build_ses_ruptures(
        src, num_occ_by_rup, s_sites, maximum_distance, sitecol):
    """
    Filter the ruptures stored in the dictionary num_occ_by_rup and
    yield pairs (rupture, <list of associated SESRuptures>)
    """
    # a single RNG seeded from the source, so the seeds below are
    # reproducible; the sorted iterations keep the call order stable
    rnd = random.Random(src.seed)
    for rup in sorted(num_occ_by_rup, key=operator.attrgetter('rup_no')):
        # filtering ruptures
        r_sites = filter_sites_by_distance_to_rupture(
            rup, maximum_distance, s_sites)
        if r_sites is None:  # ignore ruptures which are far away
            # NB: safe because sorted() already materialized the keys
            del num_occ_by_rup[rup]  # save memory
            continue
        indices = r_sites.indices if len(r_sites) < len(sitecol) \
            else None  # None means that nothing was filtered
        # creating SESRuptures
        for (col_id, ses_idx), num_occ in sorted(
                num_occ_by_rup[rup].iteritems()):  # Python 2 API
            sesruptures = []
            for occ_no in range(1, num_occ + 1):
                seed = rnd.randint(0, MAX_INT)
                # unique tag identifying collection, ses, source,
                # rupture and occurrence
                tag = 'col=%02d|ses=%04d|src=%s|rup=%03d-%02d' % (
                    col_id, ses_idx, src.source_id, rup.rup_no, occ_no)
                sesruptures.append(
                    SESRupture(rup, indices, seed, tag, col_id))
        if sesruptures:
            yield rup, sesruptures
def test_zero_integration_distance(self):
    """A zero integration distance filters out every site."""
    rupture = self._make_rupture(10, 15, 45)
    # the JB distances are [8.29156163, 5.05971598, 15.13297135,
    # 495.78630103, 496.89812309]; with distance 0 nothing survives
    outcome = filters.filter_sites_by_distance_to_rupture(
        rupture, integration_distance=0, sites=self.sitecol)
    self.assertIs(outcome, None)
def pre_execute(self):
    """
    Read the site collection and initialize GmfComputer, tags and seeds
    """
    super(ScenarioCalculator, self).pre_execute()
    trunc_level = self.oqparam.truncation_level
    correl_model = readinput.get_correl_model(self.oqparam)
    n_gmfs = self.oqparam.number_of_ground_motion_fields
    rupture = readinput.get_rupture(self.oqparam)
    self.gsims = readinput.get_gsims(self.oqparam)
    self.rlzs_assoc = readinput.get_rlzs_assoc(self.oqparam)
    # filter the sites
    self.sitecol = filters.filter_sites_by_distance_to_rupture(
        rupture, self.oqparam.maximum_distance, self.sitecol)
    if self.sitecol is None:
        # the filter returns None when no site is close enough
        raise RuntimeError(
            'All sites were filtered out! '
            'maximum_distance=%s km' % self.oqparam.maximum_distance)
    # one fixed-width byte tag per ground motion field
    self.tags = numpy.array(
        sorted(['scenario-%010d' % i for i in range(n_gmfs)]),
        (bytes, 100))
    self.computer = GmfComputer(
        rupture, self.sitecol, self.oqparam.imtls, self.gsims,
        trunc_level, correl_model)
    # derive one reproducible seed per tag from the master seed
    rnd = random.Random(self.oqparam.random_seed)
    self.tag_seed_pairs = [(tag, rnd.randint(0, calc.MAX_INT))
                           for tag in self.tags]
    # a single fake SES collection mapping tag -> Rupture
    self.sescollection = [{tag: Rupture(tag, seed, rupture)
                           for tag, seed in self.tag_seed_pairs}]
def test_zero_integration_distance(self):
    """No site can be within 0 km, so the filter must return None."""
    rupture = self._make_rupture(10, 15, 45)
    # the JB distances are [8.29156163, 5.05971598, 15.13297135,
    # 495.78630103, 496.89812309]: all strictly positive, hence all
    # sites are dropped when the integration distance is zero
    result = filters.filter_sites_by_distance_to_rupture(
        rupture, integration_distance=0, sites=self.sitecol)
    self.assertIs(result, None)
def test_filter_all_out(self):
    """Distances below the minimum JB distance drop every site."""
    rupture = self._make_rupture(50, 80, 9)
    # the JB distances are [47.0074159, 37.99716685, 40.7944923,
    # 476.2521365, 477.36015879]; 37.99 is just under the minimum
    for max_dist in [0, 1, 10, 20, 37.99]:
        result = filters.filter_sites_by_distance_to_rupture(
            rupture, integration_distance=max_dist, sites=self.sitecol)
        self.assertIs(result, None)
def test_filter_all_out(self):
    """Any distance under the closest site's JB distance filters all sites."""
    rupture = self._make_rupture(50, 80, 9)
    # the JB distances are [47.0074159, 37.99716685, 40.7944923,
    # 476.2521365, 477.36015879], so every distance tried below
    # (up to 37.99, just short of the minimum) keeps nothing
    for cutoff in [0, 1, 10, 20, 37.99]:
        filtered_sites = filters.filter_sites_by_distance_to_rupture(
            rupture, integration_distance=cutoff, sites=self.sitecol)
        self.assertIs(filtered_sites, None)
def create_ruptures(self):
    """
    Filter the sites, create the SES and GMF outputs and save in the
    database one SESRupture (with its own seed) per requested ground
    motion field.

    :raises RuntimeError:
        if the maximum distance filtering removes all sites
    """
    # check filtering
    hc = self.hc
    if hc.maximum_distance:
        self.sites = filters.filter_sites_by_distance_to_rupture(
            self.rupture, hc.maximum_distance, hc.site_collection)
        if self.sites is None:
            # fixed typo in the user-facing message: 'where' -> 'were'
            raise RuntimeError(
                'All sites were filtered out! '
                'maximum_distance=%s km' % hc.maximum_distance)
    # create ses output
    output = models.Output.objects.create(
        oq_job=self.job, display_name='SES Collection',
        output_type='ses')
    self.ses_coll = models.SESCollection.objects.create(
        output=output, lt_model=None, ordinal=0)
    # create gmf output
    output = models.Output.objects.create(
        oq_job=self.job, display_name="GMF",
        output_type="gmf_scenario")
    self.gmf = models.Gmf.objects.create(output=output)
    # creating seeds: one reproducible seed per ground motion field
    rnd = random.Random()
    rnd.seed(self.hc.random_seed)
    all_seeds = [
        rnd.randint(0, models.MAX_SINT_32)
        for _ in xrange(self.hc.number_of_ground_motion_fields)]
    with self.monitor('saving ruptures'):
        # in order to save a ProbabilisticRupture, a TrtModel is needed;
        # here we generate a fake one, corresponding to the tectonic
        # region type NA i.e. Not Available
        trt_model = models.TrtModel.objects.create(
            tectonic_region_type='NA',
            num_sources=0,
            num_ruptures=len(all_seeds),
            min_mag=self.rupture.mag,
            max_mag=self.rupture.mag,
            gsims=[self.hc.gsim])
        prob_rup = models.ProbabilisticRupture.create(
            self.rupture, self.ses_coll, trt_model)
        # bulk-insert the SESRuptures for speed
        inserter = writer.CacheInserter(models.SESRupture, 100000)
        for ses_idx, seed in enumerate(all_seeds):
            inserter.add(
                models.SESRupture(
                    ses_id=1, rupture=prob_rup,
                    tag='scenario-%010d' % ses_idx, seed=seed))
        inserter.flush()
def test(self):
    """A rupture surface backed by a rectangular mesh keeps all 9 sites."""
    mesh = RectangularMesh(self.POLYGON.lons.reshape((2, 2)),
                           self.POLYGON.lats.reshape((2, 2)),
                           depths=None)

    # minimal stand-in exposing only rupture.surface
    # .get_joyner_boore_distance(), which the filter relies upon
    class FakeRupture(object):
        class surface(object):
            @classmethod
            def get_joyner_boore_distance(cls, sites_mesh):
                return mesh.get_joyner_boore_distance(sites_mesh)

    result = filters.filter_sites_by_distance_to_rupture(
        rupture=FakeRupture, integration_distance=1.01,
        sites=self.sitecol)
    numpy.testing.assert_array_equal(result.indices,
                                     [0, 1, 2, 3, 4, 5, 6, 7, 8])
def test(self):
    """With a permissive 1.01 km distance no site gets filtered out."""
    jb_mesh = RectangularMesh(self.POLYGON.lons.reshape((2, 2)),
                              self.POLYGON.lats.reshape((2, 2)),
                              depths=None)

    # fake rupture: only the surface.get_joyner_boore_distance()
    # classmethod is needed by the filter under test
    class _Rupture(object):
        class surface(object):
            @classmethod
            def get_joyner_boore_distance(cls, target_mesh):
                return jb_mesh.get_joyner_boore_distance(target_mesh)

    kept = filters.filter_sites_by_distance_to_rupture(
        rupture=_Rupture, integration_distance=1.01, sites=self.sitecol)
    numpy.testing.assert_array_equal(
        kept.indices, [0, 1, 2, 3, 4, 5, 6, 7, 8])
def __call__(self, rupture):
    """
    Filter the sites affected by the given rupture.

    :returns: a FilteredSiteCollection or None
    """
    if not self.min_iml:
        # no minimum intensity configured: plain distance filtering
        return filter_sites_by_distance_to_rupture(
            rupture, self.max_dist, self.sites)
    # otherwise compute one GMF and keep the sites where at least one
    # (gsim, imt) pair reaches the minimum intensity
    gmf_calc = calc.gmf.GmfComputer(
        rupture, self.sites, self.imts, self.gsims, self.trunc_level)
    [gmf] = gmf_calc.calcgmfs(1, rupture.seed)
    mask = numpy.zeros(len(self.sites), bool)
    for gsim in self.gsims:
        by_imt = gmf[str(gsim)]
        for imt in self.imts:
            # boolean accumulation: acts as a logical OR on the mask
            mask += by_imt[imt] >= self.min_iml[imt]
    return gmf_calc.sites.filter(mask)
def create_ruptures(self):
    """
    Read the scenario rupture, filter the sites, create the SES and GMF
    outputs, save the ruptures in the database and initialize the
    GmfComputer.

    :raises RuntimeError:
        if the maximum distance filtering removes all sites
    """
    oqparam = models.oqparam(self.job.id)
    self.imts = map(
        from_string,
        sorted(oqparam.intensity_measure_types_and_levels))
    self.rupture = get_rupture(oqparam)
    # check filtering
    trunc_level = getattr(oqparam, 'truncation_level', None)
    maximum_distance = oqparam.maximum_distance
    self.sites = filters.filter_sites_by_distance_to_rupture(
        self.rupture, maximum_distance, self.site_collection)
    if self.sites is None:
        # fixed typo in the user-facing message: 'where' -> 'were'
        raise RuntimeError(
            'All sites were filtered out! '
            'maximum_distance=%s km' % maximum_distance)
    # create ses output
    output = models.Output.objects.create(
        oq_job=self.job, display_name='SES Collection',
        output_type='ses')
    self.ses_coll = models.SESCollection.create(output=output)
    # create gmf output
    output = models.Output.objects.create(
        oq_job=self.job, display_name="GMF",
        output_type="gmf_scenario")
    self.gmf = models.Gmf.objects.create(output=output)
    with self.monitor('saving ruptures'):
        # one tag (and one seed) per requested ground motion field
        self.tags = ['scenario-%010d' % i for i in xrange(
            oqparam.number_of_ground_motion_fields)]
        _, self.rupids, self.seeds = create_db_ruptures(
            self.rupture, self.ses_coll, self.tags,
            self.hc.random_seed)
    correlation_model = models.get_correl_model(
        models.OqJob.objects.get(pk=self.job.id))
    gsim = AVAILABLE_GSIMS[oqparam.gsim]()
    # NOTE(review): the computer receives the unfiltered
    # site_collection, not self.sites — confirm this is intended
    self.computer = GmfComputer(
        self.rupture, self.site_collection, self.imts, gsim,
        trunc_level, correlation_model)
def create_ruptures(self):
    """
    Read the scenario rupture, filter the sites, create the SES and GMF
    outputs, save the ruptures in the database and initialize the
    GmfComputer.

    :raises RuntimeError:
        if the maximum distance filtering removes all sites
    """
    oqparam = models.oqparam(self.job.id)
    self.imts = map(from_string, oqparam.imtls)
    self.rupture = readinput.get_rupture(oqparam)
    # check filtering
    trunc_level = oqparam.truncation_level
    maximum_distance = oqparam.maximum_distance
    self.sites = filters.filter_sites_by_distance_to_rupture(
        self.rupture, maximum_distance, self.site_collection)
    if self.sites is None:
        # fixed typo in the user-facing message: 'where' -> 'were'
        raise RuntimeError(
            'All sites were filtered out! '
            'maximum_distance=%s km' % maximum_distance)
    # create ses output
    output = models.Output.objects.create(
        oq_job=self.job, display_name='SES Collection',
        output_type='ses')
    self.ses_coll = models.SESCollection.create(output=output)
    # create gmf output
    output = models.Output.objects.create(
        oq_job=self.job, display_name="GMF",
        output_type="gmf_scenario")
    self.gmf = models.Gmf.objects.create(output=output)
    with self.monitor('saving ruptures', autoflush=True):
        # one tag (and one seed) per requested ground motion field
        self.tags = ['scenario-%010d' % i for i in xrange(
            oqparam.number_of_ground_motion_fields)]
        # NOTE(review): uses self.oqparam.random_seed while the rest of
        # the method reads the local `oqparam` — confirm both exist and
        # agree before unifying them
        _, self.rupids, self.seeds = create_db_ruptures(
            self.rupture, self.ses_coll, self.tags,
            self.oqparam.random_seed)
    correlation_model = models.get_correl_model(
        models.OqJob.objects.get(pk=self.job.id))
    gsim = valid.gsim(oqparam.gsim)
    self.computer = GmfComputer(
        self.rupture, self.sites, oqparam.imtls, [gsim],
        trunc_level, correlation_model)
def pre_execute(self):
    """
    Read the site collection and initialize GmfComputer and seeds
    """
    super(ScenarioCalculator, self).pre_execute()
    oq = self.oqparam
    trunc_level = oq.truncation_level
    correl_model = oq.get_correl_model()
    # save the rupture in the datastore while keeping a local reference
    self.datastore["rupture"] = rupture = readinput.get_rupture(oq)
    self.gsims = readinput.get_gsims(oq)
    maxdist = oq.maximum_distance["default"]
    with self.monitor("filtering sites", autoflush=True):
        self.sitecol = filters.filter_sites_by_distance_to_rupture(
            rupture, maxdist, self.sitecol)
    if self.sitecol is None:
        # the filter returns None when no site is within maxdist
        raise RuntimeError(
            "All sites were filtered out! maximum_distance=%s km"
            % maxdist)
    self.computer = GmfComputer(rupture, self.sitecol, oq.imtls,
                                self.gsims, trunc_level, correl_model)
    # build a fake composition info from the GSIM logic tree, so that
    # the realization association machinery works for scenarios too
    gsim_lt = readinput.get_gsim_lt(oq)
    cinfo = source.CompositionInfo.fake(gsim_lt)
    self.datastore["csm_info"] = cinfo
    self.rlzs_assoc = cinfo.get_rlzs_assoc()
def create_ruptures(self):
    """
    Read the scenario rupture, filter the sites, create the SES and GMF
    outputs, save the ruptures in the database and initialize the
    GmfComputer.

    :raises RuntimeError:
        if the maximum distance filtering removes all sites
    """
    oqparam = models.oqparam(self.job.id)
    self.imts = map(from_string, oqparam.imtls)
    self.rupture = readinput.get_rupture(oqparam)
    # check filtering
    trunc_level = oqparam.truncation_level
    maximum_distance = oqparam.maximum_distance
    self.sites = filters.filter_sites_by_distance_to_rupture(
        self.rupture, maximum_distance, self.site_collection)
    if self.sites is None:
        # fixed typo in the user-facing message: 'where' -> 'were'
        raise RuntimeError('All sites were filtered out! '
                           'maximum_distance=%s km' % maximum_distance)
    # create ses output
    output = models.Output.objects.create(
        oq_job=self.job, display_name='SES Collection',
        output_type='ses')
    self.ses_coll = models.SESCollection.create(output=output)
    # create gmf output
    output = models.Output.objects.create(
        oq_job=self.job, display_name="GMF",
        output_type="gmf_scenario")
    self.gmf = models.Gmf.objects.create(output=output)
    with self.monitor('saving ruptures', autoflush=True):
        # one tag (and one seed) per requested ground motion field
        self.tags = [
            'scenario-%010d' % i
            for i in xrange(oqparam.number_of_ground_motion_fields)]
        # NOTE(review): uses self.oqparam.random_seed while the rest of
        # the method reads the local `oqparam` — confirm both exist and
        # agree before unifying them
        _, self.rupids, self.seeds = create_db_ruptures(
            self.rupture, self.ses_coll, self.tags,
            self.oqparam.random_seed)
    correlation_model = models.get_correl_model(
        models.OqJob.objects.get(pk=self.job.id))
    gsim = valid.gsim(oqparam.gsim)
    self.computer = GmfComputer(
        self.rupture, self.sites, oqparam.imtls, [gsim],
        trunc_level, correlation_model)
def pre_execute(self):
    """
    Read the site collection and initialize GmfComputer and seeds
    """
    super(ScenarioCalculator, self).pre_execute()
    oq = self.oqparam
    trunc_level = oq.truncation_level
    correl_model = oq.get_correl_model()
    rup = readinput.get_rupture(oq)
    # the rupture carries its own seed, taken from the job configuration
    rup.seed = self.oqparam.random_seed
    self.gsims = readinput.get_gsims(oq)
    maxdist = oq.maximum_distance['default']
    with self.monitor('filtering sites', autoflush=True):
        self.sitecol = filters.filter_sites_by_distance_to_rupture(
            rup, maxdist, self.sitecol)
    if self.sitecol is None:
        # the filter returns None when no site is within maxdist
        raise RuntimeError(
            'All sites were filtered out! maximum_distance=%s km'
            % maxdist)
    # build one event record per requested GMF; the record fields are
    # eid, ses, occ, sample
    events = numpy.zeros(oq.number_of_ground_motion_fields,
                         calc.stored_event_dt)
    events['eid'] = numpy.arange(oq.number_of_ground_motion_fields)
    rupture = calc.EBRupture(rup, self.sitecol.sids, events, 0, 0)
    # serialization offsets: site index and event slice of this rupture
    rupture.sidx = 0
    rupture.eidx1 = 0
    rupture.eidx2 = len(events)
    self.datastore['sids'] = self.sitecol.sids
    self.datastore['events/grp-00'] = events
    # serialize the single rupture into the fake group 00
    array, nbytes = calc.RuptureSerializer.get_array_nbytes([rupture])
    self.datastore.extend('ruptures/grp-00', array, nbytes=nbytes)
    self.computer = GmfComputer(rupture, self.sitecol, oq.imtls,
                                self.gsims, trunc_level, correl_model)
    # build a fake composition info from the GSIM logic tree, so that
    # the realization association machinery works for scenarios too
    gsim_lt = readinput.get_gsim_lt(oq)
    cinfo = source.CompositionInfo.fake(gsim_lt)
    self.datastore['csm_info'] = cinfo
    self.rlzs_assoc = cinfo.get_rlzs_assoc()
def compute_ruptures(
        job_id, sitecol, src_seeds, trt_model_id, task_no):
    """
    Celery task for the stochastic event set calculator.

    Samples logic trees and calls the stochastic event set calculator.

    Once stochastic event sets are calculated, results will be saved to
    the database. See :class:`openquake.engine.db.models.SESCollection`.

    Optionally (specified in the job configuration using the
    `ground_motion_fields` parameter), GMFs can be computed from each
    rupture in each stochastic event set. GMFs are also saved to the
    database.

    :param int job_id:
        ID of the currently running job.
    :param sitecol:
        a :class:`openquake.hazardlib.site.SiteCollection` instance
    :param src_seeds:
        List of pairs (source, seed)
    :param task_no:
        an ordinal so that GMV can be collected in a reproducible order
    """
    # NB: all realizations in gsims correspond to the same source model
    trt_model = models.TrtModel.objects.get(pk=trt_model_id)
    ses_coll = models.SESCollection.objects.get(
        lt_model=trt_model.lt_model)
    hc = models.HazardCalculation.objects.get(oqjob=job_id)
    all_ses = range(1, hc.ses_per_logic_tree_path + 1)
    tot_ruptures = 0
    # one monitor per phase, flushed at the end of the task
    filter_sites_mon = LightMonitor(
        'filtering sites', job_id, compute_ruptures)
    generate_ruptures_mon = LightMonitor(
        'generating ruptures', job_id, compute_ruptures)
    filter_ruptures_mon = LightMonitor(
        'filtering ruptures', job_id, compute_ruptures)
    save_ruptures_mon = LightMonitor(
        'saving ruptures', job_id, compute_ruptures)

    # Compute and save stochastic event sets
    rnd = random.Random()
    for src, seed in src_seeds:
        t0 = time.time()
        # reseed per source, so each source is reproducible in isolation
        rnd.seed(seed)

        with filter_sites_mon:  # filtering sources
            s_sites = src.filter_sites_by_distance_to_source(
                hc.maximum_distance, sitecol
            ) if hc.maximum_distance else sitecol
            if s_sites is None:
                continue

        # the dictionary `ses_num_occ` contains [(ses, num_occurrences)]
        # for each occurring rupture for each ses in the ses collection
        ses_num_occ = collections.defaultdict(list)
        with generate_ruptures_mon:  # generating ruptures for the given source
            for rup_no, rup in enumerate(src.iter_ruptures(), 1):
                rup.rup_no = rup_no
                for ses_idx in all_ses:
                    # reseed numpy's global RNG before each sampling
                    numpy.random.seed(rnd.randint(0, models.MAX_SINT_32))
                    num_occurrences = rup.sample_number_of_occurrences()
                    if num_occurrences:
                        ses_num_occ[rup].append(
                            (ses_idx, num_occurrences))

        # NB: the number of occurrences is very low, << 1, so it is
        # more efficient to filter only the ruptures that occur, i.e.
        # to call sample_number_of_occurrences() *before* the filtering
        for rup in sorted(ses_num_occ,
                          key=operator.attrgetter('rup_no')):
            with filter_ruptures_mon:  # filtering ruptures
                r_sites = filters.filter_sites_by_distance_to_rupture(
                    rup, hc.maximum_distance, s_sites
                ) if hc.maximum_distance else s_sites
                if r_sites is None:  # ignore ruptures which are far away
                    del ses_num_occ[rup]  # save memory
                    continue

            # saving ses_ruptures
            with save_ruptures_mon:
                # using a django transaction make the saving faster
                with transaction.commit_on_success(using='job_init'):
                    indices = r_sites.indices \
                        if len(r_sites) < len(sitecol) \
                        else None  # None means that nothing was filtered
                    prob_rup = models.ProbabilisticRupture.create(
                        rup, ses_coll, trt_model, indices)
                    for ses_idx, num_occurrences in ses_num_occ[rup]:
                        for occ_no in range(1, num_occurrences + 1):
                            rup_seed = rnd.randint(0, models.MAX_SINT_32)
                            models.SESRupture.create(
                                prob_rup, ses_idx, src.source_id,
                                rup.rup_no, occ_no, rup_seed)

        if ses_num_occ:
            num_ruptures = len(ses_num_occ)
            occ_ruptures = sum(num for rup in ses_num_occ
                               for ses, num in ses_num_occ[rup])
            tot_ruptures += occ_ruptures
        else:
            # NOTE(review): if src.iter_ruptures() yielded nothing,
            # rup_no is unbound here and this raises NameError — confirm
            # whether empty sources are possible at this point
            num_ruptures = rup_no
            occ_ruptures = 0

        # save SourceInfo
        source_inserter.add(
            models.SourceInfo(trt_model_id=trt_model_id,
                              source_id=src.source_id,
                              source_class=src.__class__.__name__,
                              num_sites=len(s_sites),
                              num_ruptures=rup_no,
                              occ_ruptures=occ_ruptures,
                              uniq_ruptures=num_ruptures,
                              calc_time=time.time() - t0))

    filter_sites_mon.flush()
    generate_ruptures_mon.flush()
    filter_ruptures_mon.flush()
    save_ruptures_mon.flush()
    source_inserter.flush()
    return tot_ruptures, trt_model_id