def pre_execute(self):
    """
    Inherited from core.EventBasedRiskCalculator.pre_execute.

    Enforces no correlation, both on GMFs and assets.
    """
    # sanity checks: this calculator requires uncorrelated inputs,
    # so both the GMF correlation model and the asset correlation
    # must be unset before delegating to the parent pre_execute
    correlation = models.get_correl_model(self.job)
    assert correlation is None, correlation
    assert not self.rc.asset_correlation, self.rc.asset_correlation
    core.EventBasedRiskCalculator.pre_execute(self)
def compute_gmfs_and_curves(job_id, ses_ruptures, sitecol):
    """
    Compute the ground motion fields for the given ruptures and,
    optionally, the hazard curves derived from them.

    :param int job_id:
        ID of the currently running job
    :param ses_ruptures:
        a list of blocks of SESRuptures with homogeneous TrtModel
    :param sitecol:
        a :class:`openquake.hazardlib.site.SiteCollection` instance
    :returns:
        a dictionary trt_model_id -> (curves_by_gsim, bounding_boxes)
        where the list of bounding boxes is empty
    """
    job = models.OqJob.objects.get(pk=job_id)
    params = job.get_oqparam()
    imt_list = map(from_string, sorted(params.intensity_measure_types_and_levels))
    # NB: by construction each block is a non-empty list with
    # ruptures of homogeneous trt_model
    trt_model = ses_ruptures[0].rupture.ses_collection.trt_model
    rlzs_by_gsim = trt_model.get_rlzs_by_gsim()
    gsim_instances = [logictree.GSIM[name]() for name in rlzs_by_gsim]
    gmf_calc = GmfCalculator(
        sorted(imt_list), sorted(gsim_instances), trt_model.id,
        getattr(params, 'truncation_level', None),
        models.get_correl_model(job))

    with EnginePerformanceMonitor(
            'computing gmfs', job_id, compute_gmfs_and_curves):
        # group the ruptures by their underlying ProbabilisticRupture
        for rupture, group in itertools.groupby(
                ses_ruptures, operator.attrgetter('rupture')):
            if rupture.site_indices is None:
                r_sites = sitecol
            else:
                r_sites = FilteredSiteCollection(rupture.site_indices, sitecol)
            gmf_calc.calc_gmfs(
                r_sites, rupture, [(r.id, r.seed) for r in group])

    out = {}  # trt_model_id -> (curves_by_gsim, [])
    if getattr(params, 'hazard_curves_from_gmfs', None):
        with EnginePerformanceMonitor(
                'hazard curves from gmfs', job_id, compute_gmfs_and_curves):
            out[trt_model.id] = (gmf_calc.to_haz_curves(
                sitecol.sids, params.intensity_measure_types_and_levels,
                params.investigation_time, params.ses_per_logic_tree_path), [])
    else:
        out[trt_model.id] = ([], [])
    if params.ground_motion_fields:
        with EnginePerformanceMonitor(
                'saving gmfs', job_id, compute_gmfs_and_curves):
            gmf_calc.save_gmfs(rlzs_by_gsim)
    return out
def compute_gmfs_and_curves(ses_ruptures, sitecol, rlzs_assoc, monitor):
    """
    Compute the ground motion fields for the given ruptures and,
    optionally, the hazard curves derived from them.

    :param ses_ruptures:
        a list of blocks of SESRuptures with homogeneous TrtModel
    :param sitecol:
        a :class:`openquake.hazardlib.site.SiteCollection` instance
    :param rlzs_assoc:
        a :class:`openquake.commonlib.source.RlzsAssoc` instance
    :param monitor:
        monitor of the currently running job
    :returns:
        a dictionary trt_model_id -> (curves_by_gsim, bounding_boxes)
        where the list of bounding boxes is empty
    """
    job = models.OqJob.objects.get(pk=monitor.job_id)
    params = job.get_oqparam()
    imt_list = params.imtls
    # NB: by construction each block is a non-empty list with
    # ruptures of homogeneous SESCollection
    ses_coll = ses_ruptures[0].rupture.ses_collection
    trt_model = ses_coll.trt_model
    gsims = rlzs_assoc.get_gsims_by_trt_id()[trt_model.id]
    gmf_calc = GmfCalculator(
        sorted(imt_list), sorted(gsims), ses_coll,
        params.truncation_level, models.get_correl_model(job))

    with monitor('computing gmfs', autoflush=True):
        # group the ruptures by their underlying ProbabilisticRupture
        for rupture, group in itertools.groupby(
                ses_ruptures, operator.attrgetter('rupture')):
            if rupture.site_indices is None:
                r_sites = sitecol
            else:
                r_sites = FilteredSiteCollection(rupture.site_indices, sitecol)
            gmf_calc.calc_gmfs(
                r_sites, rupture, [(r.id, r.seed) for r in group])

    out = {}  # trt_model_id -> (curves_by_gsim, [])
    if params.hazard_curves_from_gmfs:
        # effective duration covered by the stochastic event sets
        duration = params.investigation_time * params.ses_per_logic_tree_path * (
            params.number_of_logic_tree_samples or 1)
        with monitor('hazard curves from gmfs', autoflush=True):
            out[trt_model.id] = (gmf_calc.to_haz_curves(
                sitecol.sids, params.imtls,
                params.investigation_time, duration), [])
    else:
        out[trt_model.id] = ([], [])
    if params.ground_motion_fields:
        with monitor('saving gmfs', autoflush=True):
            gmf_calc.save_gmfs(rlzs_assoc)
    return out
def gmfs(job_id, ses_ruptures, sitecol, gmf_id):
    """
    Compute the GMFs produced by the given ruptures and save them
    in the database as `GmfData` rows attached to the given `Gmf`.

    :param int job_id: the current job ID
    :param ses_ruptures: a set of `SESRupture` instances
    :param sitecol: a `SiteCollection` instance
    :param int gmf_id: the ID of a `Gmf` instance
    """
    job = models.OqJob.objects.get(pk=job_id)
    hc = job.hazard_calculation
    # distinct is here to make sure that IMTs such as
    # SA(0.8) and SA(0.80) are considered the same
    imts = distinct(from_string(x) for x in sorted(hc.intensity_measure_types))
    gsim = AVAILABLE_GSIMS[hc.gsim]()  # instantiate the GSIM class
    correlation_model = models.get_correl_model(job)
    cache = collections.defaultdict(list)  # (site_id, imt) -> [(gmv, rup_id)]
    # insert GmfData in blocks of 1000 sites
    inserter = writer.CacheInserter(models.GmfData, 1000)
    # NB: ses_ruptures a non-empty list produced by the block_splitter
    rupture = ses_ruptures[0].rupture  # ProbabilisticRupture instance
    with EnginePerformanceMonitor('computing gmfs', job_id, gmfs):
        gmf = GmfComputer(rupture, sitecol, imts, [gsim],
                          hc.truncation_level, correlation_model)
        # fix: removed the dead statement `gname = gsim.__class__.__name__`;
        # the name was immediately shadowed by the loop unpacking below
        # without ever being read
        for ses_rup in ses_ruptures:
            for (gname, imt), gmvs in gmf.compute(ses_rup.seed):
                for site_id, gmv in zip(sitecol.sids, gmvs):
                    # float may be needed below to convert 1x1 matrices
                    cache[site_id, imt].append((gmv, ses_rup.id))
    with EnginePerformanceMonitor('saving gmfs', job_id, gmfs):
        for (site_id, imt_str), data in cache.iteritems():
            imt = from_string(imt_str)
            gmvs, rup_ids = zip(*data)
            inserter.add(
                models.GmfData(
                    gmf_id=gmf_id,
                    task_no=0,
                    imt=imt[0],
                    sa_period=imt[1],
                    sa_damping=imt[2],
                    site_id=site_id,
                    rupture_ids=rup_ids,
                    gmvs=gmvs))
        inserter.flush()
def create_ruptures(self):
    """
    Read the rupture from the job parameters, filter the sites against
    it, create the SES and GMF outputs in the database, save the
    database ruptures and instantiate the GMF computer.
    """
    oqparam = models.oqparam(self.job.id)
    self.imts = map(
        from_string, sorted(oqparam.intensity_measure_types_and_levels))
    self.rupture = get_rupture(oqparam)

    # check filtering
    truncation = getattr(oqparam, 'truncation_level', None)
    max_dist = oqparam.maximum_distance
    self.sites = filters.filter_sites_by_distance_to_rupture(
        self.rupture, max_dist, self.site_collection)
    if self.sites is None:
        raise RuntimeError(
            'All sites where filtered out! '
            'maximum_distance=%s km' % max_dist)

    # create ses output
    ses_output = models.Output.objects.create(
        oq_job=self.job, display_name='SES Collection', output_type='ses')
    self.ses_coll = models.SESCollection.create(output=ses_output)

    # create gmf output
    gmf_output = models.Output.objects.create(
        oq_job=self.job, display_name="GMF", output_type="gmf_scenario")
    self.gmf = models.Gmf.objects.create(output=gmf_output)

    with self.monitor('saving ruptures'):
        self.tags = []
        for i in xrange(oqparam.number_of_ground_motion_fields):
            self.tags.append('scenario-%010d' % i)
        _, self.rupids, self.seeds = create_db_ruptures(
            self.rupture, self.ses_coll, self.tags, self.hc.random_seed)

    correlation_model = models.get_correl_model(
        models.OqJob.objects.get(pk=self.job.id))
    gsim = AVAILABLE_GSIMS[oqparam.gsim]()
    # NOTE(review): this version passes the unfiltered self.site_collection
    # and a bare gsim (not a list) to GmfComputer, unlike the sibling
    # versions which pass self.sites and [gsim] -- confirm this matches
    # the GmfComputer signature in use here
    self.computer = GmfComputer(
        self.rupture, self.site_collection, self.imts, gsim,
        truncation, correlation_model)
def create_ruptures(self):
    """
    Read the rupture from the job parameters, filter the sites against
    it, create the SES and GMF outputs in the database, save the
    database ruptures and instantiate the GMF computer.
    """
    oqparam = models.oqparam(self.job.id)
    self.imts = map(from_string, oqparam.imtls)
    self.rupture = readinput.get_rupture(oqparam)

    # check filtering
    truncation = oqparam.truncation_level
    max_dist = oqparam.maximum_distance
    self.sites = filters.filter_sites_by_distance_to_rupture(
        self.rupture, max_dist, self.site_collection)
    if self.sites is None:
        raise RuntimeError(
            'All sites where filtered out! '
            'maximum_distance=%s km' % max_dist)

    # create ses output
    ses_output = models.Output.objects.create(
        oq_job=self.job, display_name='SES Collection', output_type='ses')
    self.ses_coll = models.SESCollection.create(output=ses_output)

    # create gmf output
    gmf_output = models.Output.objects.create(
        oq_job=self.job, display_name="GMF", output_type="gmf_scenario")
    self.gmf = models.Gmf.objects.create(output=gmf_output)

    with self.monitor('saving ruptures', autoflush=True):
        self.tags = []
        for i in xrange(oqparam.number_of_ground_motion_fields):
            self.tags.append('scenario-%010d' % i)
        _, self.rupids, self.seeds = create_db_ruptures(
            self.rupture, self.ses_coll, self.tags,
            self.oqparam.random_seed)

    correlation_model = models.get_correl_model(
        models.OqJob.objects.get(pk=self.job.id))
    gsim = valid.gsim(oqparam.gsim)
    self.computer = GmfComputer(
        self.rupture, self.sites, oqparam.imtls, [gsim],
        truncation, correlation_model)
def create_ruptures(self):
    """
    Build the scenario rupture and everything derived from it: the
    filtered site collection, the SES and GMF database outputs, the
    saved ruptures with their seeds, and the GmfComputer instance.
    """
    oqparam = models.oqparam(self.job.id)
    self.imts = map(from_string, oqparam.imtls)
    self.rupture = readinput.get_rupture(oqparam)
    # check filtering
    trunc_level = oqparam.truncation_level
    maximum_distance = oqparam.maximum_distance
    self.sites = filters.filter_sites_by_distance_to_rupture(
        self.rupture, maximum_distance, self.site_collection)
    if self.sites is None:
        raise RuntimeError('All sites where filtered out! '
                           'maximum_distance=%s km' % maximum_distance)
    # create ses output
    out_ses = models.Output.objects.create(oq_job=self.job,
                                           display_name='SES Collection',
                                           output_type='ses')
    self.ses_coll = models.SESCollection.create(output=out_ses)
    # create gmf output
    out_gmf = models.Output.objects.create(oq_job=self.job,
                                           display_name="GMF",
                                           output_type="gmf_scenario")
    self.gmf = models.Gmf.objects.create(output=out_gmf)
    with self.monitor('saving ruptures', autoflush=True):
        n = oqparam.number_of_ground_motion_fields
        self.tags = ['scenario-%010d' % idx for idx in xrange(n)]
        _, self.rupids, self.seeds = create_db_ruptures(
            self.rupture, self.ses_coll, self.tags,
            self.oqparam.random_seed)
    correlation_model = models.get_correl_model(
        models.OqJob.objects.get(pk=self.job.id))
    gsim = valid.gsim(oqparam.gsim)
    self.computer = GmfComputer(self.rupture, self.sites, oqparam.imtls,
                                [gsim], trunc_level, correlation_model)