Example #1
    def pre_execute(self):
        """
        Read the site collection and initialize GmfComputer, tags and seeds
        """
        super(ScenarioCalculator, self).pre_execute()
        trunc_level = self.oqparam.truncation_level
        correl_model = readinput.get_correl_model(self.oqparam)
        n_gmfs = self.oqparam.number_of_ground_motion_fields
        rupture = readinput.get_rupture(self.oqparam)
        self.gsims = readinput.get_gsims(self.oqparam)
        self.rlzs_assoc = readinput.get_rlzs_assoc(self.oqparam)

        # filter the sites
        self.sitecol = filters.filter_sites_by_distance_to_rupture(
            rupture, self.oqparam.maximum_distance, self.sitecol)
        if self.sitecol is None:
            raise RuntimeError('All sites were filtered out! '
                               'maximum_distance=%s km' %
                               self.oqparam.maximum_distance)
        self.tags = numpy.array(
            sorted(['scenario-%010d' % i for i in range(n_gmfs)]),
            (bytes, 100))
        self.computer = GmfComputer(rupture, self.sitecol, self.oqparam.imtls,
                                    self.gsims, trunc_level, correl_model)
        rnd = random.Random(self.oqparam.random_seed)
        self.tag_seed_pairs = [(tag, rnd.randint(0, calc.MAX_INT))
                               for tag in self.tags]
        self.sescollection = [{
            tag: Rupture(tag, seed, rupture)
            for tag, seed in self.tag_seed_pairs
        }]
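
A note on the tag/seed pattern above: a single master random_seed deterministically derives one seed per scenario tag, which is what makes the generated ground motion fields reproducible across runs. A minimal, stdlib-only sketch of that idea (MAX_INT here is an assumed stand-in for the engine's calc.MAX_INT):

import random

MAX_INT = 2 ** 32 - 1  # assumption: stand-in for calc.MAX_INT

n_gmfs = 3
tags = sorted('scenario-%010d' % i for i in range(n_gmfs))
rnd = random.Random(42)  # plays the role of oqparam.random_seed
tag_seed_pairs = [(tag, rnd.randint(0, MAX_INT)) for tag in tags]
print(tag_seed_pairs)  # identical output on every run with seed 42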
Example #2
    def test_from_ruptures(self):
        oq = self.oqparam
        correl_model = readinput.get_correl_model(oq)
        rupcalc = event_based.EventBasedRuptureCalculator(oq)
        rupcalc.run()
        dstore = get_datastore(rupcalc)

        # this is the case of a single SES collection
        ses_ruptures = dstore['sescollection'][0].values()

        gsims_by_trt_id = rupcalc.rlzs_assoc.get_gsims_by_trt_id()

        eps_dict = riskinput.make_eps_dict(
            self.assets_by_site, len(ses_ruptures), oq.master_seed,
            oq.asset_correlation)

        [ri] = self.riskmodel.build_inputs_from_ruptures(
            self.sitecol, ses_ruptures, gsims_by_trt_id, oq.truncation_level,
            correl_model, eps_dict, 1)

        assets, hazards, epsilons = ri.get_all(
            rupcalc.rlzs_assoc, self.assets_by_site)
        self.assertEqual([a.id for a in assets],
                         ['a0', 'a1', 'a2', 'a3', 'a4'])
        self.assertEqual(set(a.taxonomy for a in assets),
                         set(['RM', 'RC', 'W']))
        self.assertEqual(map(len, epsilons), [20] * 5)
Example #3
    def pre_execute(self):
        """
        Read the precomputed ruptures (or compute them on the fly) and
        prepare some empty files in the export directory to store the gmfs
        (if any). If there were pre-existing files, they will be erased.
        """
        super(EventBasedRiskCalculator, self).pre_execute()

        oq = self.oqparam
        epsilon_sampling = getattr(oq, 'epsilon_sampling', 1000)

        correl_model = readinput.get_correl_model(oq)
        gsims_by_col = self.rlzs_assoc.get_gsims_by_col()
        assets_by_site = self.assets_by_site
        logging.info('Building the epsilons')

        logging.info('Populating the risk inputs')
        rup_by_tag = sum(self.datastore['sescollection'], AccumDict())
        all_ruptures = [rup_by_tag[tag] for tag in sorted(rup_by_tag)]
        num_samples = min(len(all_ruptures), epsilon_sampling)
        eps_dict = riskinput.make_eps_dict(
            assets_by_site, num_samples, oq.master_seed, oq.asset_correlation)
        logging.info('Generated %d epsilons', num_samples * len(eps_dict))
        self.epsilon_matrix = numpy.array(
            [eps_dict[a['asset_ref']] for a in self.assetcol])
        self.riskinputs = list(self.riskmodel.build_inputs_from_ruptures(
            self.sitecol.complete, all_ruptures, gsims_by_col,
            oq.truncation_level, correl_model, eps_dict,
            oq.concurrent_tasks or 1))
        logging.info('Built %d risk inputs', len(self.riskinputs))
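
For context on the epsilon step: make_eps_dict draws, for every asset, a reproducible row of standard normal samples derived from master_seed (optionally correlated across assets). The following is an illustrative approximation only, ignoring asset_correlation and not the engine's actual implementation:

import numpy

def make_eps_matrix(num_assets, num_samples, master_seed):
    # one reproducible row of N(0, 1) samples per asset
    rng = numpy.random.RandomState(master_seed)
    return rng.normal(size=(num_assets, num_samples))

eps = make_eps_matrix(num_assets=5, num_samples=20, master_seed=42)
print(eps.shape)  # (5, 20)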
Example #4
    def pre_execute(self):
        """
        Read the site collection and initialize GmfComputer, tags and seeds
        """
        super(ScenarioCalculator, self).pre_execute()
        trunc_level = self.oqparam.truncation_level
        correl_model = readinput.get_correl_model(self.oqparam)
        n_gmfs = self.oqparam.number_of_ground_motion_fields
        rupture = readinput.get_rupture(self.oqparam)
        self.gsims = readinput.get_gsims(self.oqparam)
        self.rlzs_assoc = readinput.get_rlzs_assoc(self.oqparam)

        # filter the sites
        self.sitecol = filters.filter_sites_by_distance_to_rupture(
            rupture, self.oqparam.maximum_distance, self.sitecol)
        if self.sitecol is None:
            raise RuntimeError(
                'All sites were filtered out! '
                'maximum_distance=%s km' % self.oqparam.maximum_distance)
        self.tags = numpy.array(
            sorted(['scenario-%010d' % i for i in range(n_gmfs)]),
            (bytes, 100))
        self.computer = GmfComputer(
            rupture, self.sitecol, self.oqparam.imtls, self.gsims,
            trunc_level, correl_model)
        rnd = random.Random(self.oqparam.random_seed)
        self.tag_seed_pairs = [(tag, rnd.randint(0, calc.MAX_INT))
                               for tag in self.tags]
        self.sescollection = [{tag: Rupture(tag, seed, rupture)
                               for tag, seed in self.tag_seed_pairs}]
Example #5
def compute_gmfs_and_curves(eb_ruptures, sitecol, imts, rlzs_assoc,
                            min_iml, monitor):
    """
    :param eb_ruptures:
        a list of blocks of EBRuptures of the same SESCollection
    :param sitecol:
        a :class:`openquake.hazardlib.site.SiteCollection` instance
    :param imts:
        a list of IMT strings
    :param rlzs_assoc:
        a RlzsAssoc instance
    :param monitor:
        a Monitor instance
    :returns:
        a dictionary rlzi -> [gmfarray, haz_curves]
    """
    oq = monitor.oqparam
    # NB: by construction each block is a non-empty list with
    # ruptures of the same src_group_id
    trunc_level = oq.truncation_level
    correl_model = readinput.get_correl_model(oq)
    gmfadict = create(
        calc.GmfColl, eb_ruptures, sitecol, imts, rlzs_assoc, trunc_level,
        correl_model, min_iml, monitor).by_rlzi()
    result = {rlzi: [gmfadict[rlzi], None]
              if oq.ground_motion_fields else [None, None]
              for rlzi in gmfadict}
    if oq.hazard_curves_from_gmfs:
        with monitor('building hazard curves', measuremem=False):
            duration = oq.investigation_time * oq.ses_per_logic_tree_path
            for rlzi in gmfadict:
                gmvs_by_sid = group_array(gmfadict[rlzi], 'sid')
                result[rlzi][POEMAP] = calc.gmvs_to_poe_map(
                    gmvs_by_sid, oq.imtls, oq.investigation_time, duration)
    return result
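
The calc.gmvs_to_poe_map step rests on a Poisson assumption: for each intensity level, the ground motion values exceeding it over the effective duration define a rate, which is converted to a probability of exceedance within the investigation time. A self-contained sketch of that conversion (illustrative, not the engine function itself):

import numpy

def gmvs_to_poes(gmvs, imls, investigation_time, duration):
    # count exceedances per intensity level, turn the rates into PoEs
    gmvs = numpy.array(gmvs)
    n_exceeding = numpy.array([(gmvs >= iml).sum() for iml in imls])
    return 1 - numpy.exp(-investigation_time * n_exceeding / float(duration))

# 3 ground motion values observed over a 10000-year effective duration
print(gmvs_to_poes([0.1, 0.25, 0.4], [0.2, 0.3], 50, 10000))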
Example #6
    def pre_execute(self):
        """
        Read the site collection and initialize GmfComputer, etags and seeds
        """
        super(ScenarioCalculator, self).pre_execute()
        trunc_level = self.oqparam.truncation_level
        correl_model = readinput.get_correl_model(self.oqparam)
        n_gmfs = self.oqparam.number_of_ground_motion_fields
        rupture = readinput.get_rupture(self.oqparam)
        self.gsims = readinput.get_gsims(self.oqparam)
        self.rlzs_assoc = readinput.get_rlzs_assoc(self.oqparam)
        maxdist = self.oqparam.maximum_distance['default']
        with self.monitor('filtering sites', autoflush=True):
            self.sitecol = filters.filter_sites_by_distance_to_rupture(
                rupture, maxdist, self.sitecol)
        if self.sitecol is None:
            raise RuntimeError(
                'All sites were filtered out! maximum_distance=%s km' %
                maxdist)
        self.etags = numpy.array(
            sorted(['scenario-%010d' % i for i in range(n_gmfs)]),
            (bytes, 100))
        self.computer = GmfComputer(
            rupture, self.sitecol, self.oqparam.imtls, self.gsims,
            trunc_level, correl_model)
Example #7
def calc_gmfs_fast(oqparam, sitecol):
    """
    Build all the ground motion fields for the whole site collection in
    a single step.
    """
    max_dist = oqparam.maximum_distance
    correl_model = get_correl_model(oqparam)
    seed = oqparam.random_seed
    imts = get_imts(oqparam)
    [gsim] = get_gsims(oqparam)
    trunc_level = oqparam.truncation_level
    n_gmfs = oqparam.number_of_ground_motion_fields
    rupture = get_rupture(oqparam)
    res = gmf.ground_motion_fields(
        rupture,
        sitecol,
        imts,
        gsim,
        trunc_level,
        n_gmfs,
        correl_model,
        filters.rupture_site_distance_filter(max_dist),
        seed,
    )
    return {str(imt): matrix for imt, matrix in res.items()}
Example #8
    def test_from_ruptures(self):
        oq = self.oqparam
        correl_model = readinput.get_correl_model(oq)
        rupcalc = event_based.EventBasedRuptureCalculator(oq)
        rupcalc.run()
        dstore = get_datastore(rupcalc)

        # this is the case of a single SES collection
        ses_ruptures = list(dstore['sescollection'][0].values())

        gsims_by_trt_id = rupcalc.rlzs_assoc.gsims_by_trt_id

        eps_dict = riskinput.make_eps_dict(self.assets_by_site,
                                           len(ses_ruptures), oq.master_seed,
                                           oq.asset_correlation)

        [ri] = self.riskmodel.build_inputs_from_ruptures(
            self.sitecol, ses_ruptures, gsims_by_trt_id, oq.truncation_level,
            correl_model, eps_dict, 1)

        assets, hazards, epsilons = ri.get_all(
            rupcalc.rlzs_assoc, self.assets_by_site, eps_dict)
        self.assertEqual([a.id for a in assets],
                         [b'a0', b'a1', b'a2', b'a3', b'a4'])
        self.assertEqual(set(a.taxonomy for a in assets),
                         set(['RM', 'RC', 'W']))
        self.assertEqual(list(map(len, epsilons)), [20] * 5)
Example #9
    def pre_execute(self):
        """
        Read the site collection and initialize GmfComputer, etags and seeds
        """
        super(ScenarioCalculator, self).pre_execute()
        oq = self.oqparam
        trunc_level = oq.truncation_level
        correl_model = readinput.get_correl_model(oq)
        n_gmfs = oq.number_of_ground_motion_fields
        rupture = readinput.get_rupture(oq)
        self.gsims = readinput.get_gsims(oq)
        maxdist = oq.maximum_distance['default']
        with self.monitor('filtering sites', autoflush=True):
            self.sitecol = filters.filter_sites_by_distance_to_rupture(
                rupture, maxdist, self.sitecol)
        if self.sitecol is None:
            raise RuntimeError(
                'All sites were filtered out! maximum_distance=%s km' %
                maxdist)
        self.etags = numpy.array(
            sorted(['scenario-%010d~ses=1' % i for i in range(n_gmfs)]),
            (bytes, 100))
        self.computer = GmfComputer(rupture, self.sitecol, oq.imtls,
                                    self.gsims, trunc_level, correl_model)
        gsim_lt = readinput.get_gsim_lt(oq)
        cinfo = source.CompositionInfo.fake(gsim_lt)
        self.datastore['csm_info'] = cinfo
        self.rlzs_assoc = cinfo.get_rlzs_assoc()
Example #10
    def execute(self):
        gmfs_by_imt = ground_motion_fields(
            self.rupture, self.sitecol, self.imts, self.gsim,
            self.oqparam.truncation_level,
            self.oqparam.number_of_ground_motion_fields,
            correlation_model=readinput.get_correl_model(self.oqparam),
            seed=self.oqparam.random_seed)
        return gmfs_by_imt
Example #11
    def pre_execute(self):
        """
        Read the precomputed ruptures (or compute them on the fly) and
        prepare some datasets in the datastore.
        """
        super(EventBasedRiskCalculator, self).pre_execute()
        if not self.riskmodel:  # there is no riskmodel, exit early
            self.execute = lambda: None
            self.post_execute = lambda result: None
            return
        oq = self.oqparam
        if self.riskmodel.covs:
            epsilon_sampling = oq.epsilon_sampling
        else:
            epsilon_sampling = 1  # only one ignored epsilon
        correl_model = readinput.get_correl_model(oq)
        gsims_by_col = self.rlzs_assoc.get_gsims_by_col()
        assets_by_site = self.assets_by_site
        # the following is needed to set the asset idx attribute
        self.assetcol = riskinput.build_asset_collection(
            assets_by_site, oq.time_event)
        self.spec_indices = numpy.array([a['asset_ref'] in oq.specific_assets
                                         for a in self.assetcol])

        logging.info('Populating the risk inputs')
        rup_by_tag = sum(self.datastore['sescollection'], AccumDict())
        all_ruptures = [rup_by_tag[tag] for tag in sorted(rup_by_tag)]
        for i, rup in enumerate(all_ruptures):
            rup.ordinal = i
        num_samples = min(len(all_ruptures), epsilon_sampling)
        self.epsilon_matrix = eps = riskinput.make_eps(
            assets_by_site, num_samples, oq.master_seed, oq.asset_correlation)
        logging.info('Generated %d epsilons', num_samples * len(eps))
        self.riskinputs = list(self.riskmodel.build_inputs_from_ruptures(
            self.sitecol.complete, all_ruptures, gsims_by_col,
            oq.truncation_level, correl_model, eps,
            oq.concurrent_tasks or 1))
        logging.info('Built %d risk inputs', len(self.riskinputs))

        # preparing empty datasets
        loss_types = self.riskmodel.loss_types
        self.L = len(loss_types)
        self.R = len(self.rlzs_assoc.realizations)
        self.outs = OUTPUTS
        self.datasets = {}
        # ugly: attaching an attribute needed in the task function
        self.monitor.num_outputs = len(self.outs)
        self.monitor.num_assets = self.count_assets()
        for o, out in enumerate(self.outs):
            self.datastore.hdf5.create_group(out)
            for l, loss_type in enumerate(loss_types):
                for r, rlz in enumerate(self.rlzs_assoc.realizations):
                    key = '/%s/%s' % (loss_type, rlz.uid)
                    if o == AGGLOSS:  # loss tables
                        dset = self.datastore.create_dset(out + key, elt_dt)
                    elif o == SPECLOSS:  # specific losses
                        dset = self.datastore.create_dset(out + key, ela_dt)
                    self.datasets[o, l, r] = dset
Example #12
    def _init_tags(self):
        self.imts = readinput.get_imts(self.oqparam)
        gsim = readinput.get_gsim(self.oqparam)
        trunc_level = getattr(self.oqparam, 'truncation_level', None)
        correl_model = readinput.get_correl_model(self.oqparam)
        n_gmfs = self.oqparam.number_of_ground_motion_fields
        rupture = readinput.get_rupture(self.oqparam)

        self.tags = ['scenario-%010d' % i for i in xrange(n_gmfs)]
        self.computer = GmfComputer(rupture, self.sitecol, self.imts, gsim,
                                    trunc_level, correl_model)
        rnd = random.Random(getattr(self.oqparam, 'random_seed', 42))
        self.tag_seed_pairs = [(tag, rnd.randint(0, calc.MAX_INT))
                               for tag in self.tags]
Example #13
def compute_gmfs_and_curves(ses_ruptures, sitecol, rlzs_assoc, monitor):
    """
    :param ses_ruptures:
        a list of blocks of SESRuptures of the same SESCollection
    :param sitecol:
        a :class:`openquake.hazardlib.site.SiteCollection` instance
    :param rlzs_assoc:
        a RlzsAssoc instance
    :param monitor:
        a Monitor instance
    :returns:
        a dictionary (trt_model_id, gsim) -> haz_curves and/or
        (trt_model_id, col_id) -> gmfs
   """
    oq = monitor.oqparam
    # NB: by construction each block is a non-empty list with
    # ruptures of the same col_id and therefore trt_model_id
    col_id = ses_ruptures[0].col_id
    trt_id = rlzs_assoc.csm_info.get_trt_id(col_id)
    gsims = rlzs_assoc.get_gsims_by_col()[col_id]
    trunc_level = oq.truncation_level
    correl_model = readinput.get_correl_model(oq)
    tot_sites = len(sitecol.complete)
    num_sites = len(sitecol)
    gmfs = make_gmfs(ses_ruptures, sitecol, oq.imtls, gsims, trunc_level,
                     correl_model, monitor)
    result = {
        (trt_id, col_id):
        numpy.concatenate(gmfs) if oq.ground_motion_fields else None
    }
    if oq.hazard_curves_from_gmfs:
        with monitor('building hazard curves', measuremem=False) as mon:
            duration = oq.investigation_time * oq.ses_per_logic_tree_path * (
                oq.number_of_logic_tree_samples or 1)
            # collect the gmvs by site
            gmvs_by_sid = collections.defaultdict(list)
            for sr, gmf in zip(ses_ruptures, gmfs):
                site_ids = get_site_ids(sr, num_sites)
                for sid, gmv in zip(site_ids, gmf):
                    gmvs_by_sid[sid].append(gmv)
            # build the hazard curves for each GSIM
            for gsim in gsims:
                gs = str(gsim)
                result[trt_id, gs] = to_haz_curves(
                    tot_sites, gmvs_by_sid, gs, oq.imtls,
                    oq.investigation_time, duration)
        mon.flush()
    return result
Example #14
    def pre_execute(self):
        """
        Read the precomputed ruptures (or compute them on the fly) and
        prepare some datasets in the datastore.
        """
        super(EventBasedRiskCalculator, self).pre_execute()
        if not self.riskmodel:  # there is no riskmodel, exit early
            self.execute = lambda: None
            self.post_execute = lambda result: None
            return
        oq = self.oqparam
        epsilon_sampling = oq.epsilon_sampling
        correl_model = readinput.get_correl_model(oq)
        gsims_by_col = self.rlzs_assoc.get_gsims_by_col()
        assets_by_site = self.assets_by_site

        logging.info('Populating the risk inputs')
        rup_by_tag = sum(self.datastore['sescollection'], AccumDict())
        all_ruptures = [rup_by_tag[tag] for tag in sorted(rup_by_tag)]
        num_samples = min(len(all_ruptures), epsilon_sampling)
        eps_dict = riskinput.make_eps_dict(
            assets_by_site, num_samples, oq.master_seed, oq.asset_correlation)
        logging.info('Generated %d epsilons', num_samples * len(eps_dict))
        self.epsilon_matrix = numpy.array(
            [eps_dict[a['asset_ref']] for a in self.assetcol])
        self.riskinputs = list(self.riskmodel.build_inputs_from_ruptures(
            self.sitecol.complete, all_ruptures, gsims_by_col,
            oq.truncation_level, correl_model, eps_dict,
            oq.concurrent_tasks or 1))
        logging.info('Built %d risk inputs', len(self.riskinputs))

        # preparing empty datasets
        loss_types = self.riskmodel.get_loss_types()
        self.L = len(loss_types)
        self.R = len(self.rlzs_assoc.realizations)
        self.outs = ['event_loss_table-rlzs']
        if oq.insured_losses:
            self.outs.append('insured_loss_table-rlzs')
        self.datasets = {}
        for o, out in enumerate(self.outs):
            self.datastore.hdf5.create_group(out)
            for l, loss_type in enumerate(loss_types):
                for r, rlz in enumerate(self.rlzs_assoc.realizations):
                    key = '/%s/%s' % (loss_type, rlz.uid)
                    dset = self.datastore.create_dset(out + key, elt_dt)
                    self.datasets[o, l, r] = dset
Example #15
def calc_gmfs_fast(oqparam, sitecol):
    """
    Build all the ground motion fields for the whole site collection in
    a single step.
    """
    max_dist = oqparam.maximum_distance
    correl_model = get_correl_model(oqparam)
    seed = oqparam.random_seed
    imts = get_imts(oqparam)
    [gsim] = get_gsims(oqparam)
    trunc_level = oqparam.truncation_level
    n_gmfs = oqparam.number_of_ground_motion_fields
    rupture = get_rupture(oqparam)
    res = gmf.ground_motion_fields(
        rupture, sitecol, imts, gsim, trunc_level, n_gmfs, correl_model,
        filters.rupture_site_distance_filter(max_dist), seed)
    return {str(imt): matrix for imt, matrix in res.items()}
Example #16
def compute_gmfs_and_curves(ses_ruptures, sitecol, rlzs_assoc, monitor):
    """
    :param ses_ruptures:
        a list of blocks of SESRuptures of the same SESCollection
    :param sitecol:
        a :class:`openquake.hazardlib.site.SiteCollection` instance
    :param rlzs_assoc:
        a RlzsAssoc instance
    :param monitor:
        a Monitor instance
    :returns:
        a dictionary (trt_model_id, gsim) -> haz_curves and/or
        (trt_model_id, col_id) -> gmfs
   """
    oq = monitor.oqparam
    # NB: by construction each block is a non-empty list with
    # ruptures of the same col_id and therefore trt_model_id
    col_id = ses_ruptures[0].col_id
    trt_id = rlzs_assoc.csm_info.get_trt_id(col_id)
    gsims = rlzs_assoc.get_gsims_by_col()[col_id]
    trunc_level = oq.truncation_level
    correl_model = readinput.get_correl_model(oq)
    tot_sites = len(sitecol.complete)
    num_sites = len(sitecol)
    gmfs = make_gmfs(ses_ruptures, sitecol, oq.imtls, gsims,
                     trunc_level, correl_model, monitor)
    result = {(trt_id, col_id): numpy.concatenate(gmfs)
              if oq.ground_motion_fields else None}
    if oq.hazard_curves_from_gmfs:
        with monitor('building hazard curves', measuremem=False) as mon:
            duration = oq.investigation_time * oq.ses_per_logic_tree_path * (
                oq.number_of_logic_tree_samples or 1)
            # collect the gmvs by site
            gmvs_by_sid = collections.defaultdict(list)
            for sr, gmf in zip(ses_ruptures, gmfs):
                site_ids = get_site_ids(sr, num_sites)
                for sid, gmv in zip(site_ids, gmf):
                    gmvs_by_sid[sid].append(gmv)
            # build the hazard curves for each GSIM
            for gsim in gsims:
                gs = str(gsim)
                result[trt_id, gs] = to_haz_curves(
                    tot_sites, gmvs_by_sid, gs, oq.imtls,
                    oq.investigation_time, duration)
        mon.flush()
    return result
Example #17
def compute_gmfs_and_curves(eb_ruptures, sitecol, rlzs_assoc, monitor):
    """
    :param eb_ruptures:
        a list of blocks of EBRuptures of the same SESCollection
    :param sitecol:
        a :class:`openquake.hazardlib.site.SiteCollection` instance
    :param rlzs_assoc:
        a RlzsAssoc instance
    :param monitor:
        a Monitor instance
    :returns:
        a dictionary (trt_model_id, gsim) -> haz_curves and/or
        trt_model_id -> gmfs
   """
    oq = monitor.oqparam
    # NB: by construction each block is a non-empty list with
    # ruptures of the same trt_model_id
    trt_id = eb_ruptures[0].trt_id
    gsims = rlzs_assoc.gsims_by_trt_id[trt_id]
    trunc_level = oq.truncation_level
    correl_model = readinput.get_correl_model(oq)
    tot_sites = len(sitecol.complete)
    gmfa_sids_etags = make_gmfs(
        eb_ruptures, sitecol, oq.imtls, gsims, trunc_level, correl_model,
        monitor)
    result = {trt_id: gmfa_sids_etags if oq.ground_motion_fields else None}
    if oq.hazard_curves_from_gmfs:
        with monitor('building hazard curves', measuremem=False):
            duration = oq.investigation_time * oq.ses_per_logic_tree_path

            # collect the gmvs by site
            gmvs_by_sid = collections.defaultdict(list)
            for serial in gmfa_sids_etags:
                gst = gmfa_sids_etags[serial]
                for sid, gmvs in zip(gst.sids, gst.gmfa.T):
                    gmvs_by_sid[sid].extend(gmvs)

            # build the hazard curves for each GSIM
            for gsim in gsims:
                gs = str(gsim)
                result[trt_id, gs] = to_haz_curves(
                    tot_sites, gmvs_by_sid, gs, oq.imtls,
                    oq.investigation_time, duration)
    return result
Example #18
def compute_gmfs_and_curves(ses_ruptures, sitecol, rlzs_assoc, monitor):
    """
    :param ses_ruptures:
        a list of blocks of SESRuptures of the same SESCollection
    :param sitecol:
        a :class:`openquake.hazardlib.site.SiteCollection` instance
    :param rlzs_assoc:
        a RlzsAssoc instance
    :param monitor:
        a Monitor instance
    :returns:
        a dictionary (trt_model_id, gsim) -> [gmf_by_tag, haz_curves]
    """
    oq = monitor.oqparam
    # NB: by construction each block is a non-empty list with
    # ruptures of the same col_id and therefore trt_model_id
    col_id = ses_ruptures[0].col_id
    trt_id = rlzs_assoc.csm_info.get_trt_id(col_id)
    gsims = rlzs_assoc.get_gsims_by_col()[col_id]
    trunc_level = getattr(oq, 'truncation_level', None)
    correl_model = readinput.get_correl_model(oq)
    num_sites = len(sitecol)
    dic = make_gmf_by_tag(
        ses_ruptures, sitecol.complete, oq.imtls, gsims,
        trunc_level, correl_model, monitor)
    zero = zero_curves(num_sites, oq.imtls)
    result = AccumDict({(trt_id, str(gsim)): [dic, zero] for gsim in gsims})
    gmfs = [dic[tag] for tag in sorted(dic)]
    if oq.hazard_curves_from_gmfs:
        duration = oq.investigation_time * oq.ses_per_logic_tree_path * (
            oq.number_of_logic_tree_samples or 1)
        for gsim in gsims:
            gs = str(gsim)
            result[trt_id, gs][1] = to_haz_curves(
                num_sites, gs, gmfs, oq.imtls,
                oq.investigation_time, duration)
    if not oq.ground_motion_fields:
        # reset the gmf_by_tag dictionary to avoid
        # transferring a lot of unused data
        for key in result:
            result[key][0].clear()
    return result
Example #19
def calc_gmfs(oqparam, sitecol):
    """
    Build all the ground motion fields for the whole site collection
    """
    correl_model = get_correl_model(oqparam)
    rnd = random.Random()
    rnd.seed(getattr(oqparam, 'random_seed', 42))
    imts = get_imts(oqparam)
    gsim = get_gsim(oqparam)
    trunc_level = getattr(oqparam, 'truncation_level', None)
    n_gmfs = getattr(oqparam, 'number_of_ground_motion_fields', 1)
    rupture = get_rupture(oqparam)
    computer = gmf.GmfComputer(rupture, sitecol, imts, gsim, trunc_level,
                               correl_model)
    seeds = [rnd.randint(0, MAX_INT) for _ in xrange(n_gmfs)]
    res = AccumDict()  # imt -> gmf
    for seed in seeds:
        for imt, gmfield in computer.compute(seed):
            res += {imt: [gmfield]}
    # res[imt] is accumulated as an R x N list (seeds x sites);
    # transpose to get the N x R matrix expected by the callers
    return {imt: numpy.array(matrix).T for imt, matrix in res.iteritems()}
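
The final transpose is worth spelling out: each computer.compute(seed) call contributes one row of length N (sites) per IMT, so after R seeds the accumulated list is R x N, and transposing yields the N x R layout. A tiny numpy illustration with made-up values:

import numpy

rows = [[0.10, 0.20, 0.30],   # seed 1: one gmf over 3 sites
        [0.15, 0.25, 0.35]]   # seed 2
matrix = numpy.array(rows).T  # shape (3, 2): sites x realizations
print(matrix.shape)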
Example #20
def _get_gmfs(dstore, serial, eid):
    oq = dstore['oqparam']
    min_iml = event_based.fix_minimum_intensity(oq.minimum_intensity, oq.imtls)
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    sitecol = dstore['sitecol'].complete
    N = len(sitecol.complete)
    rup = dstore['sescollection/' + serial]
    correl_model = readinput.get_correl_model(oq)
    gsims = rlzs_assoc.gsims_by_trt_id[rup.trt_id]
    rlzs = [rlz for gsim in map(str, gsims)
            for rlz in rlzs_assoc[rup.trt_id, gsim]]
    gmf_dt = numpy.dtype([('%03d' % rlz.ordinal, F32) for rlz in rlzs])
    gmfadict = create(event_based.GmfColl,
                      [rup], sitecol, oq.imtls, rlzs_assoc,
                      oq.truncation_level, correl_model, min_iml).by_rlzi()
    for imti, imt in enumerate(oq.imtls):
        gmfa = numpy.zeros(N, gmf_dt)
        for rlzname in gmf_dt.names:
            rlzi = int(rlzname)
            gmvs = get_array(gmfadict[rlzi], eid=eid, imti=imti)['gmv']
            gmfa[rlzname][rup.indices] = gmvs
        yield gmfa, imt
Example #21
def _get_gmfs(dstore, serial, eid):
    oq = dstore['oqparam']
    min_iml = calc.fix_minimum_intensity(oq.minimum_intensity, oq.imtls)
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    sitecol = dstore['sitecol'].complete
    N = len(sitecol.complete)
    rup = dstore['sescollection/' + serial]
    correl_model = readinput.get_correl_model(oq)
    gsims = rlzs_assoc.gsims_by_trt_id[rup.trt_id]
    rlzs = [
        rlz for gsim in map(str, gsims) for rlz in rlzs_assoc[rup.trt_id, gsim]
    ]
    gmf_dt = numpy.dtype([('%03d' % rlz.ordinal, F32) for rlz in rlzs])
    gmfadict = create(calc.GmfColl, [rup], sitecol, oq.imtls, rlzs_assoc,
                      oq.truncation_level, correl_model, min_iml).by_rlzi()
    for imti, imt in enumerate(oq.imtls):
        gmfa = numpy.zeros(N, gmf_dt)
        for rlzname in gmf_dt.names:
            rlzi = int(rlzname)
            gmvs = get_array(gmfadict[rlzi], eid=eid, imti=imti)['gmv']
            gmfa[rlzname][rup.indices] = gmvs
        yield gmfa, imt
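
The gmf_dt construction in the two _get_gmfs variants above is a plain numpy structured dtype: one float32 field per realization, named by its zero-padded ordinal, allocated for the complete site collection and filled only at the rupture's site indices. A self-contained sketch with invented values:

import numpy

F32 = numpy.float32
ordinals = [0, 1, 2]
gmf_dt = numpy.dtype([('%03d' % o, F32) for o in ordinals])

gmfa = numpy.zeros(4, gmf_dt)        # 4 sites, zero-initialised
gmfa['001'][[1, 3]] = [0.12, 0.34]   # fill only the affected site indices
print(gmfa['001'])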
Example #22
def _get_gmfs(dstore, etag):
    oq = dstore['oqparam']
    rlzs_assoc = dstore['rlzs_assoc']
    sitecol = dstore['sitecol'].complete
    N = len(sitecol.complete)
    serial = util.get_serial(etag)
    ebrup = dstore['sescollection/' + serial]
    rup_idx = get_rup_idx(ebrup, etag)
    correl_model = readinput.get_correl_model(oq)
    gsims = rlzs_assoc.gsims_by_trt_id[ebrup.trt_id]
    rlzs = [rlz for gsim in map(str, gsims)
            for rlz in rlzs_assoc[ebrup.trt_id, gsim]]
    gmf_dt = numpy.dtype([('%03d' % rlz.ordinal, F32) for rlz in rlzs])
    [gst] = event_based.make_gmfs(
        [ebrup], sitecol, oq.imtls, gsims, oq.truncation_level, correl_model
    ).values()
    for imt in oq.imtls:
        gmfa = numpy.zeros(N, gmf_dt)
        for gsim in map(str, gsims):
            data = gst.gmfa[gsim][imt][rup_idx]
            for rlz in rlzs_assoc[ebrup.trt_id, gsim]:
                gmfa['%03d' % rlz.ordinal][ebrup.indices] = data
        yield gmfa, imt
Example #23
def compute_gmfs_and_curves(eb_ruptures, sitecol, imts, rlzs_assoc, min_iml,
                            monitor):
    """
    :param eb_ruptures:
        a list of blocks of EBRuptures of the same SESCollection
    :param sitecol:
        a :class:`openquake.hazardlib.site.SiteCollection` instance
    :param imts:
        a list of IMT strings
    :param rlzs_assoc:
        a RlzsAssoc instance
    :param monitor:
        a Monitor instance
    :returns:
        a dictionary rlzi -> [gmfarray, haz_curves]
    """
    oq = monitor.oqparam
    # NB: by construction each block is a non-empty list with
    # ruptures of the same trt_model_id
    trunc_level = oq.truncation_level
    correl_model = readinput.get_correl_model(oq)
    gmfadict = create(GmfColl, eb_ruptures, sitecol, imts, rlzs_assoc,
                      trunc_level, correl_model, min_iml, monitor).by_rlzi()
    result = {
        rlzi:
        [gmfadict[rlzi], None] if oq.ground_motion_fields else [None, None]
        for rlzi in gmfadict
    }
    if oq.hazard_curves_from_gmfs:
        with monitor('building hazard curves', measuremem=False):
            duration = oq.investigation_time * oq.ses_per_logic_tree_path
            for rlzi in gmfadict:
                gmvs_by_sid = group_array(gmfadict[rlzi], 'sid')
                result[rlzi][POEMAP] = gmvs_to_poe_map(gmvs_by_sid, oq.imtls,
                                                       oq.investigation_time,
                                                       duration)
    return result
Example #24
def _get_gmfs(dstore, etag):
    oq = dstore['oqparam']
    rlzs_assoc = dstore['rlzs_assoc']
    sitecol = dstore['sitecol'].complete
    N = len(sitecol.complete)
    serial = util.get_serial(etag)
    ebrup = dstore['sescollection/' + serial]
    rup_idx = get_rup_idx(ebrup, etag)
    correl_model = readinput.get_correl_model(oq)
    gsims = rlzs_assoc.gsims_by_trt_id[ebrup.trt_id]
    rlzs = [
        rlz for gsim in map(str, gsims)
        for rlz in rlzs_assoc[ebrup.trt_id, gsim]
    ]
    gmf_dt = numpy.dtype([('%03d' % rlz.ordinal, F32) for rlz in rlzs])
    [gst] = event_based.make_gmfs([ebrup], sitecol, oq.imtls, gsims,
                                  oq.truncation_level, correl_model).values()
    for imt in oq.imtls:
        gmfa = numpy.zeros(N, gmf_dt)
        for gsim in map(str, gsims):
            data = gst.gmfa[gsim][imt][rup_idx]
            for rlz in rlzs_assoc[ebrup.trt_id, gsim]:
                gmfa['%03d' % rlz.ordinal][ebrup.indices] = data
        yield gmfa, imt
Example #25
    def execute(self):
        """
        Run the event_based_risk calculator and aggregate the results
        """
        oq = self.oqparam
        correl_model = readinput.get_correl_model(oq)
        self.N = len(self.assetcol)
        self.E = len(self.datastore['etags'])
        logging.info('Populating the risk inputs')
        rlzs_by_tr_id = self.rlzs_assoc.get_rlzs_by_trt_id()
        num_rlzs = {t: len(rlzs) for t, rlzs in rlzs_by_tr_id.items()}
        num_assets = {sid: len(self.assets_by_site[sid])
                      for sid in self.sitecol.sids}
        all_ruptures = []
        for serial in self.datastore['sescollection']:
            rup = self.datastore['sescollection/' + serial]
            rup.set_weight(num_rlzs, num_assets)
            all_ruptures.append(rup)
        all_ruptures.sort(key=operator.attrgetter('serial'))
        if not self.riskmodel.covs:
            # do not generate epsilons
            eps = None
        else:
            eps = riskinput.make_eps(
                self.assets_by_site, self.E, oq.master_seed,
                oq.asset_correlation)
            logging.info('Generated %s epsilons', eps.shape)

        # preparing empty datasets
        loss_types = self.riskmodel.loss_types
        self.C = self.oqparam.loss_curve_resolution
        self.L = L = len(loss_types)
        self.R = R = len(self.rlzs_assoc.realizations)
        self.I = self.oqparam.insured_losses

        # ugly: attaching attributes needed in the task function
        mon = self.monitor
        mon.num_assets = self.count_assets()
        mon.avg_losses = self.oqparam.avg_losses
        mon.asset_loss_table = self.oqparam.asset_loss_table
        mon.insured_losses = self.I
        mon.ses_ratio = (
            oq.risk_investigation_time or oq.investigation_time) / (
                oq.investigation_time * oq.ses_per_logic_tree_path)

        self.N = N = len(self.assetcol)
        self.E = len(self.datastore['etags'])

        # average losses, stored in a composite array of shape N, R
        self.avg_losses = numpy.zeros((N, R), oq.loss_dt())

        self.ass_loss_table = square(L, R, lambda: None)
        self.agg_loss_table = square(L, R, lambda: None)

        self.ela_dt, self.elt_dt = mon.ela_dt, mon.elt_dt = build_el_dtypes(
            self.I)
        for (l, r) in itertools.product(range(L), range(R)):
            lt = loss_types[l]
            if self.oqparam.asset_loss_table:
                self.ass_loss_table[l, r] = self.datastore.create_dset(
                    'ass_loss_table/rlz-%03d/%s' % (r, lt), self.ela_dt)
            self.agg_loss_table[l, r] = self.datastore.create_dset(
                'agg_loss_table/rlz-%03d/%s' % (r, lt), self.elt_dt)

        self.saved = collections.Counter()  # nbytes per HDF5 key
        self.ass_bytes = 0
        self.agg_bytes = 0
        self.gmfbytes = 0
        rlz_ids = getattr(self.oqparam, 'rlz_ids', ())
        if rlz_ids:
            self.rlzs_assoc = self.rlzs_assoc.extract(rlz_ids)

        if not oq.minimum_intensity:
            # infer it from the risk models if not directly set in job.ini
            oq.minimum_intensity = self.riskmodel.get_min_iml()
        min_iml = calc.fix_minimum_intensity(
            oq.minimum_intensity, oq.imtls)
        if min_iml.sum() == 0:
            logging.warn('The GMFs are not filtered: '
                         'you may want to set a minimum_intensity')
        else:
            logging.info('minimum_intensity=%s', oq.minimum_intensity)

        with self.monitor('building riskinputs', autoflush=True):
            riskinputs = self.riskmodel.build_inputs_from_ruptures(
                self.sitecol.complete, all_ruptures, oq.truncation_level,
                correl_model, min_iml, eps, oq.concurrent_tasks or 1)
            # NB: I am using generators so that the tasks are submitted
            # one at a time, without keeping all of the arguments in memory
            tm = starmap(
                self.core_task.__func__,
                ((riskinput, self.riskmodel, self.rlzs_assoc,
                  self.assetcol, self.monitor.new('task'))
                 for riskinput in riskinputs))
        res = tm.reduce(agg=self.agg)
        self.save_data_transfer(tm)
        return res
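
The ses_ratio attached to the monitor above rescales event losses from the total simulated time span (investigation_time times ses_per_logic_tree_path) down to the risk investigation time. A quick numeric check with assumed job parameters:

investigation_time = 50.0
ses_per_logic_tree_path = 100
risk_investigation_time = None  # unset: falls back to investigation_time

ses_ratio = (risk_investigation_time or investigation_time) / (
    investigation_time * ses_per_logic_tree_path)
print(ses_ratio)  # 0.01: losses are scaled by 1 / ses_per_logic_tree_path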
Example #26
    def pre_execute(self):
        """
        Read the precomputed ruptures (or compute them on the fly) and
        prepare some datasets in the datastore.
        """
        super(EventBasedRiskCalculator, self).pre_execute()
        if not self.riskmodel:  # there is no riskmodel, exit early
            self.execute = lambda: None
            self.post_execute = lambda result: None
            return
        oq = self.oqparam
        correl_model = readinput.get_correl_model(oq)
        self.N = len(self.assetcol)
        self.E = len(self.etags)
        logging.info('Populating the risk inputs')
        all_ruptures = []
        for serial in self.datastore['sescollection']:
            all_ruptures.append(self.datastore['sescollection/' + serial])
        all_ruptures.sort(key=operator.attrgetter('serial'))
        if not self.riskmodel.covs:
            # do not generate epsilons
            eps = FakeMatrix(self.N, self.E)
        else:
            eps = riskinput.make_eps(self.assets_by_site, self.E,
                                     oq.master_seed, oq.asset_correlation)
            logging.info('Generated %s epsilons', eps.shape)

        self.riskinputs = list(
            self.riskmodel.build_inputs_from_ruptures(
                self.sitecol.complete, all_ruptures,
                self.rlzs_assoc.gsims_by_trt_id, oq.truncation_level,
                correl_model, eps, oq.concurrent_tasks or 1))
        logging.info('Built %d risk inputs', len(self.riskinputs))

        # preparing empty datasets
        loss_types = self.riskmodel.loss_types
        self.C = self.oqparam.loss_curve_resolution
        self.L = L = len(loss_types)
        self.R = R = len(self.rlzs_assoc.realizations)
        self.I = self.oqparam.insured_losses

        # ugly: attaching an attribute needed in the task function
        mon = self.monitor
        mon.num_assets = self.count_assets()
        mon.avg_losses = self.oqparam.avg_losses
        mon.asset_loss_table = self.oqparam.asset_loss_table
        mon.insured_losses = self.I

        self.N = N = len(self.assetcol)
        self.E = len(self.datastore['etags'])

        # average losses, stored in a composite array of shape N, R
        multi_avg_dt = self.riskmodel.loss_type_dt(insured=self.I)
        self.avg_losses = numpy.zeros((N, R), multi_avg_dt)

        self.ass_loss_table = square(L, R, lambda: None)
        self.agg_loss_table = square(L, R, lambda: None)

        self.ela_dt, self.elt_dt = mon.ela_dt, mon.elt_dt = build_el_dtypes(
            self.I)
        for (l, r) in itertools.product(range(L), range(R)):
            lt = loss_types[l]
            if self.oqparam.asset_loss_table:
                self.ass_loss_table[l, r] = self.datastore.create_dset(
                    'ass_loss_table/rlz-%03d/%s' % (r, lt), self.ela_dt)
            self.agg_loss_table[l, r] = self.datastore.create_dset(
                'agg_loss_table/rlz-%03d/%s' % (r, lt), self.elt_dt)
Example #27
    def pre_execute(self):
        """
        Read the precomputed ruptures (or compute them on the fly) and
        prepare some datasets in the datastore.
        """
        super(EventBasedRiskCalculator, self).pre_execute()
        if not self.riskmodel:  # there is no riskmodel, exit early
            self.execute = lambda: None
            self.post_execute = lambda result: None
            return
        oq = self.oqparam
        if self.riskmodel.covs:
            epsilon_sampling = oq.epsilon_sampling
        else:
            epsilon_sampling = 1  # only one ignored epsilon
        correl_model = readinput.get_correl_model(oq)
        gsims_by_col = self.rlzs_assoc.get_gsims_by_col()
        assets_by_site = self.assets_by_site
        # the following is needed to set the asset idx attribute
        self.assetcol = riskinput.build_asset_collection(
            assets_by_site, oq.time_event)
        self.spec_indices = numpy.array(
            [a['asset_ref'] in oq.specific_assets for a in self.assetcol])

        logging.info('Populating the risk inputs')
        rup_by_tag = sum(self.datastore['sescollection'], AccumDict())
        all_ruptures = [rup_by_tag[tag] for tag in sorted(rup_by_tag)]
        for i, rup in enumerate(all_ruptures):
            rup.ordinal = i
        num_samples = min(len(all_ruptures), epsilon_sampling)
        self.epsilon_matrix = eps = riskinput.make_eps(assets_by_site,
                                                       num_samples,
                                                       oq.master_seed,
                                                       oq.asset_correlation)
        logging.info('Generated %d epsilons', num_samples * len(eps))
        self.riskinputs = list(
            self.riskmodel.build_inputs_from_ruptures(
                self.sitecol.complete, all_ruptures, gsims_by_col,
                oq.truncation_level, correl_model, eps, oq.concurrent_tasks
                or 1))
        logging.info('Built %d risk inputs', len(self.riskinputs))

        # preparing empty datasets
        loss_types = self.riskmodel.loss_types
        self.L = len(loss_types)
        self.R = len(self.rlzs_assoc.realizations)
        self.outs = OUTPUTS
        self.datasets = {}
        # ugly: attaching an attribute needed in the task function
        self.monitor.num_outputs = len(self.outs)
        self.monitor.num_assets = self.count_assets()
        for o, out in enumerate(self.outs):
            self.datastore.hdf5.create_group(out)
            for l, loss_type in enumerate(loss_types):
                for r, rlz in enumerate(self.rlzs_assoc.realizations):
                    key = '/%s/%s' % (loss_type, rlz.uid)
                    if o == AGGLOSS:  # loss tables
                        dset = self.datastore.create_dset(out + key, elt_dt)
                    elif o == SPECLOSS:  # specific losses
                        dset = self.datastore.create_dset(out + key, ela_dt)
                    self.datasets[o, l, r] = dset
Example #28
    def pre_execute(self):
        """
        Read the precomputed ruptures (or compute them on the fly) and
        prepare some datasets in the datastore.
        """
        super(EventBasedRiskCalculator, self).pre_execute()
        if not self.riskmodel:  # there is no riskmodel, exit early
            self.execute = lambda: None
            self.post_execute = lambda result: None
            return
        oq = self.oqparam
        epsilon_sampling = oq.epsilon_sampling
        correl_model = readinput.get_correl_model(oq)
        gsims_by_col = self.rlzs_assoc.get_gsims_by_col()
        assets_by_site = self.assets_by_site
        # the following is needed to set the asset idx attribute
        self.assetcol = riskinput.build_asset_collection(
            assets_by_site, oq.time_event)

        logging.info('Populating the risk inputs')
        rup_by_tag = sum(self.datastore['sescollection'], AccumDict())
        all_ruptures = [rup_by_tag[tag] for tag in sorted(rup_by_tag)]
        num_samples = min(len(all_ruptures), epsilon_sampling)
        eps_dict = riskinput.make_eps_dict(
            assets_by_site, num_samples, oq.master_seed, oq.asset_correlation)
        logging.info('Generated %d epsilons', num_samples * len(eps_dict))
        self.epsilon_matrix = numpy.array(
            [eps_dict[a['asset_ref']] for a in self.assetcol])
        self.riskinputs = list(self.riskmodel.build_inputs_from_ruptures(
            self.sitecol.complete, all_ruptures, gsims_by_col,
            oq.truncation_level, correl_model, eps_dict,
            oq.concurrent_tasks or 1))
        logging.info('Built %d risk inputs', len(self.riskinputs))

        # preparing empty datasets
        loss_types = self.riskmodel.loss_types
        self.L = len(loss_types)
        self.R = len(self.rlzs_assoc.realizations)
        self.outs = OUTPUTS
        self.datasets = {}
        self.monitor.oqparam = self.oqparam
        # ugly: attaching an attribute needed in the task function
        self.monitor.num_outputs = len(self.outs)
        # attaching two other attributes used in riskinput.gen_outputs
        self.monitor.assets_by_site = self.assets_by_site
        self.monitor.num_assets = N = self.count_assets()
        for o, out in enumerate(self.outs):
            self.datastore.hdf5.create_group(out)
            for l, loss_type in enumerate(loss_types):
                cb = self.riskmodel.curve_builders[l]
                build_curves = len(cb.ratios)
                for r, rlz in enumerate(self.rlzs_assoc.realizations):
                    key = '/%s/rlz-%03d' % (loss_type, rlz.ordinal)
                    if o in (ELT, ILT):  # loss tables
                        dset = self.datastore.create_dset(out + key, elt_dt)
                    else:  # risk curves
                        if not build_curves:
                            continue
                        dset = self.datastore.create_dset(
                            out + key, cb.poes_dt, N)
                    self.datasets[o, l, r] = dset
                if o in (FRC, IRC) and build_curves:
                    grp = self.datastore['%s/%s' % (out, loss_type)]
                    grp.attrs['loss_ratios'] = cb.ratios
Example #29
    def pre_execute(self):
        """
        Read the precomputed ruptures (or compute them on the fly) and
        prepare some datasets in the datastore.
        """
        super(EventBasedRiskCalculator, self).pre_execute()
        if not self.riskmodel:  # there is no riskmodel, exit early
            self.execute = lambda: None
            self.post_execute = lambda result: None
            return
        oq = self.oqparam
        epsilon_sampling = oq.epsilon_sampling
        correl_model = readinput.get_correl_model(oq)
        gsims_by_col = self.rlzs_assoc.get_gsims_by_col()
        assets_by_site = self.assets_by_site
        # the following is needed to set the asset idx attribute
        self.assetcol = riskinput.build_asset_collection(
            assets_by_site, oq.time_event)

        logging.info('Populating the risk inputs')
        rup_by_tag = sum(self.datastore['sescollection'], AccumDict())
        all_ruptures = [rup_by_tag[tag] for tag in sorted(rup_by_tag)]
        num_samples = min(len(all_ruptures), epsilon_sampling)
        eps_dict = riskinput.make_eps_dict(assets_by_site, num_samples,
                                           oq.master_seed,
                                           oq.asset_correlation)
        logging.info('Generated %d epsilons', num_samples * len(eps_dict))
        self.epsilon_matrix = numpy.array(
            [eps_dict[a['asset_ref']] for a in self.assetcol])
        self.riskinputs = list(
            self.riskmodel.build_inputs_from_ruptures(
                self.sitecol.complete, all_ruptures, gsims_by_col,
                oq.truncation_level, correl_model, eps_dict,
                oq.concurrent_tasks or 1))
        logging.info('Built %d risk inputs', len(self.riskinputs))

        # preparing empty datasets
        loss_types = self.riskmodel.loss_types
        self.L = len(loss_types)
        self.R = len(self.rlzs_assoc.realizations)
        self.outs = OUTPUTS
        self.datasets = {}
        self.monitor.oqparam = self.oqparam
        # ugly: attaching an attribute needed in the task function
        self.monitor.num_outputs = len(self.outs)
        # attaching two other attributes used in riskinput.gen_outputs
        self.monitor.assets_by_site = self.assets_by_site
        self.monitor.num_assets = N = self.count_assets()
        for o, out in enumerate(self.outs):
            self.datastore.hdf5.create_group(out)
            for l, loss_type in enumerate(loss_types):
                cb = self.riskmodel.curve_builders[l]
                build_curves = len(cb.ratios)
                for r, rlz in enumerate(self.rlzs_assoc.realizations):
                    key = '/%s/rlz-%03d' % (loss_type, rlz.ordinal)
                    if o in (ELT, ILT):  # loss tables
                        dset = self.datastore.create_dset(out + key, elt_dt)
                    else:  # risk curves
                        if not build_curves:
                            continue
                        dset = self.datastore.create_dset(
                            out + key, cb.poes_dt, N)
                    self.datasets[o, l, r] = dset
                if o in (FRC, IRC) and build_curves:
                    grp = self.datastore['%s/%s' % (out, loss_type)]
                    grp.attrs['loss_ratios'] = cb.ratios
Example #30
    def execute(self):
        """
        Run the event_based_risk calculator and aggregate the results
        """
        oq = self.oqparam
        correl_model = readinput.get_correl_model(oq)
        self.N = len(self.assetcol)
        self.E = len(self.etags)
        logging.info('Populating the risk inputs')
        rlzs_by_tr_id = self.rlzs_assoc.get_rlzs_by_trt_id()
        num_rlzs = {t: len(rlzs) for t, rlzs in rlzs_by_tr_id.items()}
        num_assets = {
            sid: len(self.assets_by_site[sid])
            for sid in self.sitecol.sids
        }
        all_ruptures = []
        for serial in self.datastore['sescollection']:
            rup = self.datastore['sescollection/' + serial]
            rup.set_weight(num_rlzs, num_assets)
            all_ruptures.append(rup)
        all_ruptures.sort(key=operator.attrgetter('serial'))
        if not self.riskmodel.covs:
            # do not generate epsilons
            eps = None
        else:
            eps = riskinput.make_eps(self.assets_by_site, self.E,
                                     oq.master_seed, oq.asset_correlation)
            logging.info('Generated %s epsilons', eps.shape)

        # preparing empty datasets
        loss_types = self.riskmodel.loss_types
        self.C = self.oqparam.loss_curve_resolution
        self.L = L = len(loss_types)
        self.R = R = len(self.rlzs_assoc.realizations)
        self.I = self.oqparam.insured_losses

        # ugly: attaching attributes needed in the task function
        mon = self.monitor
        mon.num_assets = self.count_assets()
        mon.avg_losses = self.oqparam.avg_losses
        mon.asset_loss_table = self.oqparam.asset_loss_table
        mon.insured_losses = self.I
        mon.ses_ratio = (oq.risk_investigation_time or
                         oq.investigation_time) / (oq.investigation_time *
                                                   oq.ses_per_logic_tree_path)

        self.N = N = len(self.assetcol)
        self.E = len(self.datastore['etags'])

        # average losses, stored in a composite array of shape N, R
        multi_avg_dt = self.riskmodel.loss_type_dt(insured=self.I)
        self.avg_losses = numpy.zeros((N, R), multi_avg_dt)

        self.ass_loss_table = square(L, R, lambda: None)
        self.agg_loss_table = square(L, R, lambda: None)

        self.ela_dt, self.elt_dt = mon.ela_dt, mon.elt_dt = build_el_dtypes(
            self.I)
        for (l, r) in itertools.product(range(L), range(R)):
            lt = loss_types[l]
            if self.oqparam.asset_loss_table:
                self.ass_loss_table[l, r] = self.datastore.create_dset(
                    'ass_loss_table/rlz-%03d/%s' % (r, lt), self.ela_dt)
            self.agg_loss_table[l, r] = self.datastore.create_dset(
                'agg_loss_table/rlz-%03d/%s' % (r, lt), self.elt_dt)

        self.saved = collections.Counter()  # nbytes per HDF5 key
        self.ass_bytes = 0
        self.agg_bytes = 0
        self.gmfbytes = 0
        rlz_ids = getattr(self.oqparam, 'rlz_ids', ())
        if rlz_ids:
            self.rlzs_assoc = self.rlzs_assoc.extract(rlz_ids)

        if not oq.minimum_intensity:
            # infer it from the risk models if not directly set in job.ini
            oq.minimum_intensity = self.riskmodel.get_min_iml()
        min_iml = calc.fix_minimum_intensity(oq.minimum_intensity, oq.imtls)
        if min_iml.sum() == 0:
            logging.warn('The GMFs are not filtered: '
                         'you may want to set a minimum_intensity')
        else:
            logging.info('minimum_intensity=%s', oq.minimum_intensity)

        with self.monitor('building riskinputs', autoflush=True):
            riskinputs = self.riskmodel.build_inputs_from_ruptures(
                self.sitecol.complete, all_ruptures, oq.truncation_level,
                correl_model, min_iml, eps, oq.concurrent_tasks or 1)
            # NB: I am using generators so that the tasks are submitted
            # one at a time, without keeping all of the arguments in memory
            tm = starmap(self.core_task.__func__,
                         ((riskinput, self.riskmodel, self.rlzs_assoc,
                           self.assetcol, self.monitor.new('task'))
                          for riskinput in riskinputs))
        res = tm.reduce(agg=self.agg)
        self.save_data_transfer(tm)
        return res
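
Common to all of the examples above is the readinput.get_correl_model(oqparam) call, which looks up the spatial correlation model named in the job configuration and instantiates it, or returns None when no correlation is requested. A hedged sketch of that dispatch, assuming the model classes in openquake.hazardlib.correlation (e.g. JB2009CorrelationModel):

from openquake.hazardlib import correlation

def get_correl_model(oqparam):
    # sketch: resolve 'JB2009' -> correlation.JB2009CorrelationModel
    correl_name = getattr(oqparam, 'ground_motion_correlation_model', None)
    if correl_name is None:
        return None  # no spatial correlation model requested
    correl_model_cls = getattr(
        correlation, '%sCorrelationModel' % correl_name)
    return correl_model_cls(**oqparam.ground_motion_correlation_params)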