def combine_mean_curves(calc_big, calc_small):
    """
    Combine the hazard curves coming from two different calculations.
    The result will be the hazard curves of calc_big, updated on the sites
    in common with calc_small with the PoEs of calc_small. For instance:
    calc_big = USA, calc_small = California
    """
    dstore_big = datastore.read(calc_big)
    dstore_small = datastore.read(calc_small)
    sitecol_big = dstore_big['sitecol']
    sitecol_small = dstore_small['sitecol']
    site_id_big = {(lon, lat): sid for sid, lon, lat in zip(
        sitecol_big.sids, sitecol_big.lons, sitecol_big.lats)}
    site_id_small = {(lon, lat): sid for sid, lon, lat in zip(
        sitecol_small.sids, sitecol_small.lons, sitecol_small.lats)}
    common = set(site_id_big) & set(site_id_small)
    if not common:
        raise RuntimeError('There are no common sites between calculation '
                           '%d and %d' % (calc_big, calc_small))
    sids_small = [site_id_small[lonlat] for lonlat in common]
    pmap_big = PmapGetter(dstore_big).get_mean()  # USA
    pmap_small = PmapGetter(dstore_small, sids=sids_small).get_mean()  # Cal
    for lonlat in common:
        pmap_big[site_id_big[lonlat]] |= pmap_small.get(
            site_id_small[lonlat], 0)
    out = 'combine_%d_%d.hdf5' % (calc_big, calc_small)
    with hdf5.File(out, 'w') as h5:
        h5['hcurves/mean'] = pmap_big
        h5['oqparam'] = dstore_big['oqparam']
        h5['sitecol'] = dstore_big['sitecol']
    print('Generated %s' % out)
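
For context, a minimal usage sketch follows. The calculation IDs are hypothetical, and the imports are assumptions based on the usual OpenQuake engine layout, since the snippets on this page omit them.

# Assumed imports for the snippets on this page (hypothetical paths):
#   from openquake.baselib import hdf5
#   from openquake.commonlib import datastore
#   from openquake.calculators.getters import PmapGetter
combine_mean_curves(calc_big=1234, calc_small=1235)
# writes combine_1234_1235.hdf5 with the combined mean curves plus the
# oqparam and sitecol records of the big calculation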
Example 2
def combine_mean_curves(calc1, calc2):
    """
    Combine the hazard curves coming from two different calculations.
    The result will be the hazard curves of calc1, updated on the sites
    in common with calc2 with the PoEs of calc2.
    """
    dstore1 = datastore.read(calc1)
    dstore2 = datastore.read(calc2)
    sitecol1 = dstore1['sitecol']
    sitecol2 = dstore2['sitecol']
    site_id1 = {
        (lon, lat): sid
        for sid, lon, lat in zip(sitecol1.sids, sitecol1.lons, sitecol1.lats)
    }
    site_id2 = {
        (lon, lat): sid
        for sid, lon, lat in zip(sitecol2.sids, sitecol2.lons, sitecol2.lats)
    }
    common = set(site_id1) & set(site_id2)
    if not common:
        raise RuntimeError('There are no common sites between calculation '
                           '%d and %d' % (calc1, calc2))
    sids2 = [site_id2[lonlat] for lonlat in common]
    pmap = PmapGetter(dstore1).get_mean()
    pmap2 = PmapGetter(dstore2, sids=sids2).get_mean()
    for lonlat in common:
        pmap[site_id1[lonlat]] |= pmap2[site_id2[lonlat]]
    out = 'combine_%d_%d.hdf5' % (calc1, calc2)
    with hdf5.File(out, 'w') as h5:
        h5['hcurves/mean'] = pmap
    print('Generated %s' % out)
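
The |= on the probability curves is assumed to compose PoEs as independent probabilities, i.e. the standard 1 - (1-a)(1-b) rule; a self-contained sketch of that rule (not the hazardlib implementation):

import numpy

def combine_poes(poe_a, poe_b):
    # P(A or B) = 1 - (1 - P(A)) * (1 - P(B)) for independent events;
    # assumed to be what the in-place |= on probability curves computes
    return 1. - (1. - numpy.asarray(poe_a)) * (1. - numpy.asarray(poe_b))

combine_poes([0.1, 0.01], [0.2, 0.02])  # -> array([0.28, 0.0298])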
Example 3
def save_hmaps(self):
    """
    Save hazard maps generated from the hazard curves
    """
    oq = self.oqparam
    if oq.poes:
        mon = self.monitor('computing hazard maps')
        logging.info('Computing hazard maps for PoEs=%s', oq.poes)
        with mon:
            N = len(self.sitecol.complete)
            ct = oq.concurrent_tasks or 1
            if 'hcurves' in self.datastore:
                kinds = self.datastore['hcurves']
                hmaps_dt = numpy.dtype(
                    [('%s-%s' % (imt, poe), F32)
                     for imt in oq.imtls for poe in oq.poes])
                for kind in kinds:
                    self.datastore.create_dset(
                        'hmaps/' + kind, hmaps_dt, (N,), fillvalue=None)
                allargs = []
                for slc in general.split_in_slices(N, ct):
                    hcurves_by_kind = {
                        kind: self.datastore['hcurves/' + kind][slc]
                        for kind in kinds}
                    allargs.append((hcurves_by_kind, slc,
                                    oq.imtls, oq.poes, mon))
                for dic, slc in Starmap(build_hmaps, allargs):
                    for kind, hmaps in dic.items():
                        self.datastore['hmaps/' + kind][slc] = hmaps
            else:  # single realization
                pg = PmapGetter(self.datastore, self.rlzs_assoc)
                self.datastore['hmaps/mean'] = calc.make_hmap_array(
                    pg.get_mean(), oq.imtls, oq.poes, N)
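
Conceptually, make_hmap_array inverts each hazard curve at the target PoEs. A simplified sketch of that inversion, assuming strictly positive PoEs (the engine version also handles zero PoEs and multiple IMTs):

import numpy

def iml_at_poe(imls, poes, target_poe):
    # a hazard curve is decreasing in PoE, so invert it by interpolating
    # in log-log space; numpy.interp wants increasing x, hence the [::-1]
    imls = numpy.asarray(imls, float)
    poes = numpy.asarray(poes, float)
    return numpy.exp(numpy.interp(numpy.log(target_poe),
                                  numpy.log(poes[::-1]),
                                  numpy.log(imls[::-1])))

iml_at_poe([0.01, 0.1, 0.5, 1.0], [0.9, 0.3, 0.05, 0.01], 0.1)  # ~0.27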
Example 4
def export_uhs_xml(ekey, dstore):
    oq = dstore['oqparam']
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    pgetter = PmapGetter(dstore, rlzs_assoc)
    sitemesh = get_mesh(dstore['sitecol'].complete)
    key, kind, fmt = get_kkf(ekey)
    fnames = []
    for kind, hcurves in pgetter.items(kind):
        metadata = get_metadata(rlzs_assoc.realizations, kind)
        _, periods = calc.get_imts_periods(oq.imtls)
        uhs = calc.make_uhs(hcurves, oq.imtls, oq.poes, len(sitemesh))
        for poe in oq.poes:
            fname = hazard_curve_name(dstore, (key, fmt), kind + '-%s' % poe,
                                      rlzs_assoc)
            writer = hazard_writers.UHSXMLWriter(
                fname,
                periods=periods,
                poe=poe,
                investigation_time=oq.investigation_time,
                **metadata)
            data = []
            for site, curve in zip(sitemesh, uhs[str(poe)]):
                data.append(UHS(curve, Location(site)))
            writer.serialize(data)
            fnames.append(fname)
    return sorted(fnames)
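
A uniform hazard spectrum is the hazard-map value at a fixed PoE collected across the spectral periods; sketched below with the hypothetical iml_at_poe helper from the earlier sketch:

def make_uhs_sketch(periods, curves, imls, poe):
    # curves and imls are hypothetical dicts keyed by period; one UHS
    # ordinate per period, obtained by inverting that period's curve
    return [iml_at_poe(imls[p], curves[p], poe) for p in periods]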
Example 5
def compare_mean_curves(calc_ref, calc, nsigma=3):
    """
    Compare the hazard curves coming from two different calculations.
    """
    dstore_ref = datastore.read(calc_ref)
    dstore = datastore.read(calc)
    imtls = dstore_ref['oqparam'].imtls
    if dstore['oqparam'].imtls != imtls:
        raise RuntimeError('The IMTs and levels are different between '
                           'calculation %d and %d' % (calc_ref, calc))
    sitecol_ref = dstore_ref['sitecol']
    sitecol = dstore['sitecol']
    site_id_ref = {(lon, lat): sid
                   for sid, lon, lat in zip(sitecol_ref.sids, sitecol_ref.lons,
                                            sitecol_ref.lats)}
    site_id = {
        (lon, lat): sid
        for sid, lon, lat in zip(sitecol.sids, sitecol.lons, sitecol.lats)
    }
    common = set(site_id_ref) & set(site_id)
    if not common:
        raise RuntimeError('There are no common sites between calculation '
                           '%d and %d' % (calc_ref, calc))
    pmap_ref = PmapGetter(dstore_ref,
                          sids=[site_id_ref[lonlat]
                                for lonlat in common]).get_mean()
    pmap = PmapGetter(dstore,
                      sids=[site_id[lonlat] for lonlat in common]).get_mean()
    for lonlat in common:
        mean, std = pmap[site_id[lonlat]].array.T  # shape (2, N)
        mean_ref, std_ref = pmap_ref[site_id_ref[lonlat]].array.T
        err = numpy.sqrt(std**2 + std_ref**2)
        for imt in imtls:
            sl = imtls(imt)
            ok = (numpy.abs(mean[sl] - mean_ref[sl]) < nsigma * err[sl]).all()
            if not ok:
                md = (numpy.abs(mean[sl] - mean_ref[sl])).max()
                plt.title('point=%s, imt=%s, maxdiff=%.2e' % (lonlat, imt, md))
                plt.loglog(imtls[imt],
                           mean_ref[sl] + std_ref[sl],
                           label=str(calc_ref),
                           color='black')
                plt.loglog(imtls[imt],
                           mean_ref[sl] - std_ref[sl],
                           color='black')
                plt.loglog(imtls[imt],
                           mean[sl] + std[sl],
                           label=str(calc),
                           color='red')
                plt.loglog(imtls[imt], mean[sl] - std[sl], color='red')
                plt.legend()
                plt.show()
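
Hypothetical usage (made-up calculation IDs):

compare_mean_curves(1234, 1235, nsigma=2)
# pops up a log-log plot for every common site and IMT whose mean curves
# differ by more than 2 combined standard deviations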
Example 6
    def test_case_11(self):
        self.assert_curves_ok([
            'hazard_curve-mean.csv', 'hazard_curve-smltp_b1_b2-gsimltp_b1.csv',
            'hazard_curve-smltp_b1_b3-gsimltp_b1.csv',
            'hazard_curve-smltp_b1_b4-gsimltp_b1.csv',
            'quantile_curve-0.1.csv', 'quantile_curve-0.9.csv'
        ], case_11.__file__)

        # checking PmapGetter.get_hcurves
        pgetter = PmapGetter(self.calc.datastore, self.calc.weights)
        poes = pgetter.get_hcurves(pgetter.init())[0]
        mean = self.calc.datastore.sel('hcurves-stats', stat='mean', sid=0)
        mean2 = poes.T @ numpy.array([w['weight'] for w in self.calc.weights])
        aac(mean2.flat, mean.flat)
        check_disagg_by_src(self.calc.datastore)
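
The matrix product in the test is just the weighted mean over realizations (aac is presumably an assert_allclose alias). A standalone sketch with made-up numbers:

import numpy

poes = numpy.array([[0.9, 0.5, 0.1],    # realization 0, 3 levels
                    [0.8, 0.4, 0.05]])  # realization 1
weights = numpy.array([0.7, 0.3])
mean = poes.T @ weights  # weighted mean curve over realizations, shape (3,)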
Example 7
def export_hcurves_xml(ekey, dstore):
    key, kind, fmt = get_kkf(ekey)
    len_ext = len(fmt) + 1
    oq = dstore['oqparam']
    sitemesh = get_mesh(dstore['sitecol'])
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    fnames = []
    writercls = hazard_writers.HazardCurveXMLWriter
    for kind, hcurves in PmapGetter(dstore, rlzs_assoc).items(kind):
        if hasattr(hcurves, 'array'):
            hcurves = hcurves.array[:, 0]
        if kind.startswith('rlz-'):
            rlz = rlzs_assoc.realizations[int(kind[4:])]
            smlt_path = '_'.join(rlz.sm_lt_path)
            gsimlt_path = rlz.gsim_rlz.uid
        else:
            smlt_path = ''
            gsimlt_path = ''
        name = hazard_curve_name(dstore, ekey, kind, rlzs_assoc)
        for im in oq.imtls:
            slc = oq.imtls(im)
            imt = from_string(im)
            fname = name[:-len_ext] + '-' + imt.name + '.' + fmt
            data = [HazardCurve(Location(site), poes[slc])
                    for site, poes in zip(sitemesh, hcurves)]
            writer = writercls(fname,
                               investigation_time=oq.investigation_time,
                               imls=oq.imtls[im], imt=imt.name,
                               sa_period=getattr(imt, 'period', None) or None,
                               sa_damping=getattr(imt, 'damping', None),
                               smlt_path=smlt_path, gsimlt_path=gsimlt_path)
            writer.serialize(data)
            fnames.append(fname)
    return sorted(fnames)
Example 8
def export_hmaps_xml_json(ekey, dstore):
    key, kind, fmt = get_kkf(ekey)
    oq = dstore['oqparam']
    sitecol = dstore['sitecol']
    sitemesh = get_mesh(sitecol)
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    fnames = []
    writercls = hazard_writers.HazardMapXMLWriter
    nsites = len(sitemesh)
    for kind, hcurves in PmapGetter(dstore, rlzs_assoc).items():
        hmaps = calc.make_hmap_array(hcurves, oq.imtls, oq.poes, nsites)
        if kind.startswith('rlz-'):
            rlz = rlzs_assoc.realizations[int(kind[4:])]
            smlt_path = '_'.join(rlz.sm_lt_path)
            gsimlt_path = rlz.gsim_rlz.uid
        else:
            smlt_path = ''
            gsimlt_path = ''
        for imt in oq.imtls:
            for poe in oq.poes:
                suffix = '-%s-%s' % (poe, imt)
                fname = hazard_curve_name(
                    dstore, ekey, kind + suffix, rlzs_assoc)
                data = [HazardMap(site[0], site[1], hmap['%s-%s' % (imt, poe)])
                        for site, hmap in zip(sitemesh, hmaps)]
                writer = writercls(
                    fname, investigation_time=oq.investigation_time,
                    imt=imt, poe=poe,
                    smlt_path=smlt_path, gsimlt_path=gsimlt_path)
                writer.serialize(data)
                fnames.append(fname)
    return sorted(fnames)
Example 9
def export_hcurves_xml_json(ekey, dstore):
    key, kind, fmt = get_kkf(ekey)
    len_ext = len(fmt) + 1
    oq = dstore['oqparam']
    sitemesh = get_mesh(dstore['sitecol'])
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    fnames = []
    writercls = hazard_writers.HazardCurveXMLWriter
    for kind, hcurves in PmapGetter(dstore).items(kind):
        if kind.startswith('rlz-'):
            rlz = rlzs_assoc.realizations[int(kind[4:])]
            smlt_path = '_'.join(rlz.sm_lt_path)
            gsimlt_path = rlz.gsim_rlz.uid
        else:
            smlt_path = ''
            gsimlt_path = ''
        curves = hcurves.convert(oq.imtls, len(sitemesh))
        name = hazard_curve_name(dstore, ekey, kind, rlzs_assoc)
        for imt in oq.imtls:
            imtype, sa_period, sa_damping = from_string(imt)
            fname = name[:-len_ext] + '-' + imt + '.' + fmt
            data = [HazardCurve(Location(site), poes[imt])
                    for site, poes in zip(sitemesh, curves)]
            writer = writercls(fname,
                               investigation_time=oq.investigation_time,
                               imls=oq.imtls[imt], imt=imtype,
                               sa_period=sa_period, sa_damping=sa_damping,
                               smlt_path=smlt_path, gsimlt_path=gsimlt_path)
            writer.serialize(data)
            fnames.append(fname)
    return sorted(fnames)
Example 10
def export_hcurves_csv(ekey, dstore):
    """
    Exports the hazard curves into several .csv files

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    sitecol = dstore['sitecol']
    sitemesh = get_mesh(sitecol)
    key, kind, fmt = get_kkf(ekey)
    fnames = []
    if oq.poes:
        pdic = DictArray({imt: oq.poes for imt in oq.imtls})
    for kind, hcurves in PmapGetter(dstore, rlzs_assoc).items(kind):
        fname = hazard_curve_name(dstore, (key, fmt), kind, rlzs_assoc)
        comment = _comment(rlzs_assoc, kind, oq.investigation_time)
        if key == 'uhs' and oq.poes and oq.uniform_hazard_spectra:
            uhs_curves = calc.make_uhs(hcurves, oq.imtls, oq.poes,
                                       len(sitemesh))
            writers.write_csv(fname,
                              util.compose_arrays(sitemesh, uhs_curves),
                              comment=comment)
            fnames.append(fname)
        elif key == 'hmaps' and oq.poes and oq.hazard_maps:
            hmap = dstore['hmaps/' + kind].value
            fnames.extend(
                export_hmaps_csv(ekey, fname, sitemesh, hmap, pdic, comment))
        elif key == 'hcurves':
            fnames.extend(
                export_hcurves_by_imt_csv(ekey, kind, rlzs_assoc, fname,
                                          sitecol, hcurves, oq))
    return sorted(fnames)
Example 11
def export_uhs_np(ekey, dstore):
    oq = dstore['oqparam']
    mesh = get_mesh(dstore['sitecol'])
    fname = dstore.export_path('%s.%s' % ekey)
    dic = {}
    for kind, hcurves in PmapGetter(dstore).items():
        dic[kind] = calc.make_uhs(hcurves, oq.imtls, oq.poes, len(mesh))
    save_np(fname, dic, mesh, investigation_time=oq.investigation_time)
    return [fname]
Example 12
def save_hmaps(self):
    """
    Save hazard maps generated from the hazard curves
    """
    oq = self.oqparam
    if oq.poes:
        logging.info('Computing hazard maps for PoEs=%s', oq.poes)
        with self.monitor('computing hazard maps',
                          autoflush=True, measuremem=True):
            N = len(self.sitecol.complete)
            if 'hcurves' in self.datastore:
                # TODO: we could parallelize this branch
                for kind in self.datastore['hcurves']:
                    self.datastore['hmaps/' + kind] = calc.make_hmap_array(
                        self.datastore['hcurves/' + kind],
                        oq.imtls, oq.poes, N)
            else:  # single realization
                pg = PmapGetter(self.datastore, self.rlzs_assoc)
                self.datastore['hmaps/mean'] = calc.make_hmap_array(
                    pg.get_mean(), oq.imtls, oq.poes, N)
Example 13
def _gen_riskinputs(self, kind, eps, num_events):
    rinfo_dt = numpy.dtype([('sid', U16), ('num_assets', U16)])
    rinfo = []
    assets_by_site = self.assetcol.assets_by_site()
    dstore = self.can_read_parent() or self.datastore
    for sid, assets in enumerate(assets_by_site):
        if len(assets) == 0:
            continue
        # build the riskinputs
        if kind == 'poe':  # hcurves, shape (R, N)
            getter = PmapGetter(dstore, self.rlzs_assoc, [sid])
            getter.num_rlzs = self.R
        else:  # gmf
            getter = GmfDataGetter(dstore, [sid], self.R)
        if dstore is self.datastore:
            # read the hazard data in the controller node
            getter.init()
        else:
            # the datastore must be closed to avoid the HDF5 fork bug
            assert dstore.hdf5 == (), '%s is not closed!' % dstore
        for block in general.block_splitter(
                assets, self.oqparam.assets_per_site_limit):
            # dictionary of epsilons for the reduced assets
            reduced_eps = {
                ass.ordinal: eps[ass.ordinal]
                for ass in block if eps is not None and len(eps)
            }
            yield riskinput.RiskInput(getter, [block], reduced_eps)
        rinfo.append((sid, len(block)))
        if len(block) >= TWO16:
            logging.error('There are %d assets on site #%d!', len(block),
                          sid)
    self.datastore['riskinput_info'] = numpy.array(rinfo, rinfo_dt)
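
block_splitter chunks the assets of one site into fixed-size blocks. A simplified stand-in (the engine version also accepts a per-item weight function):

def block_splitter_sketch(items, max_weight):
    # yield consecutive blocks of at most max_weight items
    block = []
    for item in items:
        block.append(item)
        if len(block) >= max_weight:
            yield block
            block = []
    if block:
        yield block

list(block_splitter_sketch(range(5), 2))  # [[0, 1], [2, 3], [4]]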
Example 14
def _gen_riskinputs(self, kind, eps, num_events):
    assets_by_site = self.assetcol.assets_by_site()
    dstore = self.can_read_parent() or self.datastore
    for sid, assets in enumerate(assets_by_site):
        if len(assets) == 0:
            continue
        # build the riskinputs
        if kind == 'poe':  # hcurves, shape (R, N)
            getter = PmapGetter(dstore, self.rlzs_assoc, [sid])
            getter.num_rlzs = self.R
        else:  # gmf
            getter = GmfDataGetter(dstore, [sid], self.R,
                                   self.oqparam.imtls)
        if dstore is self.datastore:
            # read the hazard data in the controller node
            getter.init()
        else:
            # the datastore must be closed to avoid the HDF5 fork bug
            assert dstore.hdf5 == (), '%s is not closed!' % dstore
        for block in general.block_splitter(assets, 1000):
            # dictionary of epsilons for the reduced assets
            reduced_eps = {
                ass.ordinal: eps[ass.ordinal]
                for ass in block if eps is not None and len(eps)
            }
            yield riskinput.RiskInput(getter, [block], reduced_eps)
Example 15
def export_hmaps_np(ekey, dstore):
    oq = dstore['oqparam']
    sitecol = dstore['sitecol']
    mesh = get_mesh(sitecol)
    pdic = DictArray({imt: oq.poes for imt in oq.imtls})
    fname = dstore.export_path('%s.%s' % ekey)
    dic = {}
    for kind, hcurves in PmapGetter(dstore).items():
        hmap = calc.make_hmap(hcurves, oq.imtls, oq.poes)
        dic[kind] = calc.convert_to_array(hmap, len(mesh), pdic)
    save_np(fname, dic, mesh, ('vs30', F32, sitecol.vs30),
            investigation_time=oq.investigation_time)
    return [fname]
Example 16
def build_riskinputs(self, kind, eps=None, num_events=0):
    """
    :param kind:
        kind of hazard getter, can be 'poe' or 'gmf'
    :param eps:
        a matrix of epsilons (or None)
    :param num_events:
        how many events there are
    :returns:
        a list of RiskInputs objects, sorted by IMT.
    """
    logging.info('There are %d realizations', self.R)
    imtls = self.oqparam.imtls
    if not set(self.oqparam.risk_imtls) & set(imtls):
        rsk = ', '.join(self.oqparam.risk_imtls)
        haz = ', '.join(imtls)
        raise ValueError('The IMTs in the risk models (%s) are disjoint '
                         'from the IMTs in the hazard (%s)' % (rsk, haz))
    num_tasks = self.oqparam.concurrent_tasks or 1
    if not hasattr(self, 'assetcol'):
        self.assetcol = self.datastore['assetcol']
    self.riskmodel.taxonomy = self.assetcol.tagcol.taxonomy
    assets_by_site = self.assetcol.assets_by_site()
    with self.monitor('building riskinputs', autoflush=True):
        riskinputs = []
        sid_weight_pairs = [(sid, len(assets))
                            for sid, assets in enumerate(assets_by_site)]
        blocks = general.split_in_blocks(sid_weight_pairs,
                                         num_tasks,
                                         weight=operator.itemgetter(1))
        dstore = self.can_read_parent() or self.datastore
        for block in blocks:
            sids = numpy.array([sid for sid, _weight in block])
            reduced_assets = assets_by_site[sids]
            # dictionary of epsilons for the reduced assets
            reduced_eps = {}
            for assets in reduced_assets:
                for ass in assets:
                    if eps is not None and len(eps):
                        reduced_eps[ass.ordinal] = eps[ass.ordinal]
            # build the riskinputs
            if kind == 'poe':  # hcurves, shape (R, N)
                getter = PmapGetter(dstore, sids, self.rlzs_assoc)
                getter.num_rlzs = self.R
            else:  # gmf
                getter = GmfDataGetter(dstore, sids, self.R, num_events)
            if dstore is self.datastore:
                # read the hazard data in the controller node
                logging.info('Reading hazard')
                getter.init()
            else:
                # the datastore must be closed to avoid the HDF5 fork bug
                assert dstore.hdf5 == (), '%s is not closed!' % dstore
            ri = riskinput.RiskInput(getter, reduced_assets, reduced_eps)
            if ri.weight > 0:
                riskinputs.append(ri)
        assert riskinputs
        logging.info('Built %d risk inputs', len(riskinputs))
        return riskinputs
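
split_in_blocks partitions the (sid, weight) pairs into roughly num_tasks blocks of similar total weight. A simplified sketch of that behavior; the engine's blocks also carry a .weight attribute, as used in the _gen_riskinputs example below:

import operator

def split_in_blocks_sketch(items, hint, weight=lambda item: 1):
    # close a block whenever it reaches the average weight per block
    total = sum(weight(it) for it in items)
    per_block = total / hint
    block, acc = [], 0
    for it in items:
        block.append(it)
        acc += weight(it)
        if acc >= per_block:
            yield block
            block, acc = [], 0
    if block:
        yield block

pairs = [(0, 3), (1, 1), (2, 2), (3, 2)]  # (sid, num_assets)
list(split_in_blocks_sketch(pairs, 2, weight=operator.itemgetter(1)))
# -> [[(0, 3), (1, 1)], [(2, 2), (3, 2)]]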
Example 17
def _gen_riskinputs(self, kind, eps, num_events):
    num_tasks = self.oqparam.concurrent_tasks or 1
    assets_by_site = self.assetcol.assets_by_site()
    if kind == 'poe':
        indices = None
    else:
        indices = self.datastore['gmf_data/indices'].value
    dstore = self.can_read_parent() or self.datastore
    sid_weight = []
    for sid, assets in enumerate(assets_by_site):
        if len(assets) == 0:
            continue
        elif indices is None:
            weight = len(assets)
        else:
            idx = indices[sid]
            if indices.dtype.names:  # engine < 3.2
                num_gmfs = sum(stop - start for start, stop in idx)
            else:  # engine >= 3.2
                num_gmfs = (idx[1] - idx[0]).sum()
            weight = len(assets) * (num_gmfs or 1)
        sid_weight.append((sid, weight))
    for block in general.split_in_blocks(sid_weight,
                                         num_tasks,
                                         weight=operator.itemgetter(1)):
        sids = numpy.array([sid for sid, _weight in block])
        reduced_assets = assets_by_site[sids]
        # dictionary of epsilons for the reduced assets
        reduced_eps = {}
        for assets in reduced_assets:
            for ass in assets:
                if eps is not None and len(eps):
                    reduced_eps[ass.ordinal] = eps[ass.ordinal]
        # build the riskinputs
        if kind == 'poe':  # hcurves, shape (R, N)
            getter = PmapGetter(dstore, self.rlzs_assoc, sids)
            getter.num_rlzs = self.R
        else:  # gmf
            getter = GmfDataGetter(dstore, sids, self.R, num_events,
                                   self.oqparam.imtls)
        if dstore is self.datastore:
            # read the hazard data in the controller node
            getter.init()
        else:
            # the datastore must be closed to avoid the HDF5 fork bug
            assert dstore.hdf5 == (), '%s is not closed!' % dstore
        ri = riskinput.RiskInput(getter, reduced_assets, reduced_eps)
        ri.weight = block.weight
        yield ri
Example 18
def export_hmaps_xml_json(ekey, dstore):
    key, kind, fmt = get_kkf(ekey)
    if fmt == 'geojson':
        logging.warning('The geojson exporters will be removed soon')
    oq = dstore['oqparam']
    sitecol = dstore['sitecol']
    sitemesh = get_mesh(sitecol)
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    fnames = []
    writercls = (hazard_writers.HazardMapGeoJSONWriter
                 if fmt == 'geojson' else
                 hazard_writers.HazardMapXMLWriter)
    pdic = DictArray({imt: oq.poes for imt in oq.imtls})
    nsites = len(sitemesh)
    for kind, hcurves in PmapGetter(dstore).items():
        hmaps = calc.make_hmap(
            hcurves, oq.imtls, oq.poes).convert(pdic, nsites)
        if kind.startswith('rlz-'):
            rlz = rlzs_assoc.realizations[int(kind[4:])]
            smlt_path = '_'.join(rlz.sm_lt_path)
            gsimlt_path = rlz.gsim_rlz.uid
        else:
            smlt_path = ''
            gsimlt_path = ''
        for imt in oq.imtls:
            for j, poe in enumerate(oq.poes):
                suffix = '-%s-%s' % (poe, imt)
                fname = hazard_curve_name(
                    dstore, ekey, kind + suffix, rlzs_assoc)
                data = [HazardMap(site[0], site[1], _extract(hmap, imt, j))
                        for site, hmap in zip(sitemesh, hmaps)]
                writer = writercls(
                    fname, investigation_time=oq.investigation_time,
                    imt=imt, poe=poe,
                    smlt_path=smlt_path, gsimlt_path=gsimlt_path)
                writer.serialize(data)
                fnames.append(fname)
    return sorted(fnames)