Exemple #1
0
def export_gmf_spec(ekey, dstore, spec):
    """
    Export the GMFs for the given event IDs, one CSV file per
    (event tag, IMT) pair.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :param spec: a string specifying what to export exactly
    :returns: the list of file names produced by the CSV writer
    """
    oq = dstore['oqparam']
    # `spec` is a comma-separated list of event IDs, e.g. "0,3,7"
    eids = numpy.array([int(rid) for rid in spec.split(',')])
    sitemesh = dstore['sitemesh']
    writer = writers.CsvWriter(fmt='%.5f')
    etags = dstore['etags']
    if 'scenario' in oq.calculation_mode:
        _, gmfs_by_trt_gsim = base.get_gmfs(dstore)
        # NOTE(review): only trt_id 0 is read below — presumably scenario
        # calculations have a single tectonic region type; confirm
        gsims = sorted(gsim for trt, gsim in gmfs_by_trt_gsim)
        imts = gmfs_by_trt_gsim[0, gsims[0]].dtype.names
        gmf_dt = numpy.dtype([(str(gsim), F32) for gsim in gsims])
        for eid in eids:
            etag = etags[eid]
            for imt in imts:
                # one column per GSIM, one row per site
                gmfa = numpy.zeros(len(sitemesh), gmf_dt)
                for gsim in gsims:
                    gmfa[str(gsim)] = gmfs_by_trt_gsim[0, gsim][imt][:, eid]
                dest = dstore.export_path('gmf-%s-%s.csv' % (etag, imt))
                data = util.compose_arrays(sitemesh, gmfa)
                writer.save(data, dest)
    else:  # event based
        for eid in eids:
            etag = etags[eid]
            for gmfa, imt in _get_gmfs(dstore, util.get_serial(etag), eid):
                dest = dstore.export_path('gmf-%s-%s.csv' % (etag, imt))
                data = util.compose_arrays(sitemesh, gmfa)
                writer.save(data, dest)
    return writer.getsaved()
Exemple #2
0
def export_hcurves_csv(ekey, dstore):
    """
    Export the hazard curves (or UHS/hazard maps, depending on the key)
    into several .csv files, one per kind of curve.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the sorted list of exported file names
    """
    key, fmt = ekey
    oq = dstore['oqparam']
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    sitecol = dstore['sitecol']
    sitemesh = dstore['sitemesh']
    # UHS are derived on the fly from the hazard maps
    dskey = 'hmaps' if key == 'uhs' else key
    fnames = []
    for kind, hcurves in sorted(dstore[dskey].items()):
        fname = hazard_curve_name(
            dstore, ekey, kind, rlzs_assoc, oq.number_of_logic_tree_samples)
        if key == 'uhs':
            uhs_curves = calc.make_uhs(hcurves, oq.imtls, oq.poes)
            write_csv(fname, util.compose_arrays(sitemesh, uhs_curves))
        elif key == 'hmaps':
            write_csv(fname, util.compose_arrays(sitemesh, hcurves))
        else:
            export_hazard_curves_csv(ekey, fname, sitecol, hcurves, oq.imtls)
        fnames.append(fname)
    return sorted(fnames)
Exemple #3
0
def export_hcurves_csv(ekey, dstore):
    """
    Exports the hazard curves into several .csv files

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the sorted list of exported file names
    """
    oq = dstore['oqparam']
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    sitecol = dstore['sitecol']
    sitemesh = get_mesh(sitecol)
    key, fmt = ekey
    fnames = []
    # UHS curves are derived on the fly from the hazard maps
    items = dstore['hmaps' if key == 'uhs' else key].items()
    for kind, hcurves in sorted(items):
        fname = hazard_curve_name(dstore, ekey, kind, rlzs_assoc)
        if key == 'uhs':
            uhs_curves = calc.make_uhs(hcurves, oq.imtls, oq.poes)
            write_csv(fname, util.compose_arrays(sitemesh, uhs_curves))
            fnames.append(fname)
        elif key == 'hmaps':
            write_csv(fname, util.compose_arrays(sitemesh, hcurves))
            fnames.append(fname)
        else:
            # the plain hazard curves produce one file per IMT
            if export.from_db:  # called by export_from_db
                fnames.extend(
                    export_hcurves_by_imt_csv(
                        ekey, fname, sitecol, hcurves, oq.imtls))
            else:  # when exporting directly from the datastore
                fnames.extend(
                    export_hazard_curves_csv(
                        ekey, fname, sitecol, hcurves, oq.imtls))

    return sorted(fnames)
Exemple #4
0
def export_gmf_scenario_hdf5(ekey, dstore):
    """
    Compute the GMFs on the fly from the stored rupture (if any) and
    export them into a single HDF5 file, one dataset per GSIM.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: a list with the name of the exported file, or [] when the
        calculation mode is not a scenario
    """
    oq = dstore['oqparam']
    if 'scenario' not in oq.calculation_mode:
        # logging.warn is a deprecated alias of logging.warning
        logging.warning('GMF export not implemented for %s',
                        oq.calculation_mode)
        return []
    sitemesh = get_mesh(dstore['sitecol'], complete=False)
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    gsims = rlzs_assoc.gsims_by_grp_id[0]  # there is a single grp_id
    E = oq.number_of_ground_motion_fields
    correl_model = oq.get_correl_model()
    computer = gmf.GmfComputer(
            dstore['rupture'], dstore['sitecol'], oq.imtls, gsims,
            oq.truncation_level, correl_model)
    fname = dstore.export_path('%s.%s' % ekey)
    # one column per (IMT, event) pair, e.g. PGA-000, PGA-001, ...
    gmf_dt = numpy.dtype([('%s-%03d' % (imt, eid), F32) for imt in oq.imtls
                          for eid in range(E)])
    imts = list(oq.imtls)
    with hdf5.File(fname, 'w') as f:
        for gsim in gsims:
            arr = computer.compute(gsim, E, oq.random_seed)
            # do not rebind E: the last axis has exactly E events
            I, S, _ = arr.shape  # #IMTs, #sites, #events
            gmfa = numpy.zeros(S, gmf_dt)
            for imti in range(I):
                for eid in range(E):
                    field = '%s-%03d' % (imts[imti], eid)
                    gmfa[field] = arr[imti, :, eid]
            f[str(gsim)] = util.compose_arrays(sitemesh, gmfa)
    return [fname]
Exemple #5
0
def convert_to_array(pmap, sitemesh, imtls):
    """
    Convert the probability map into a composite array with header
    of the form PGA-0.1, PGA-0.2 ...

    :param pmap: probability map
    :param sitemesh: mesh of N sites
    :param imtls: a DictArray with IMT and levels
    :returns: a composite array of length N
    """
    nsites = len(sitemesh)
    # build the export field names once, of the form PGA-0.1, PGA-0.2 ...
    # (the original rebuilt every name string for every site)
    fields = ['%s-%s' % (imt, iml)
              for imt, imls in imtls.items() for iml in imls]
    curves = numpy.zeros(nsites, numpy.dtype([(f, F64) for f in fields]))
    for sid, pcurve in pmap.items():
        curve = curves[sid]
        # pcurve.array is assumed flat, in the same (imt, iml) order
        # as the fields built above
        for idx, field in enumerate(fields):
            curve[field] = pcurve.array[idx]
    return util.compose_arrays(sitemesh, curves)
Exemple #6
0
def view_task_info(token, dstore):
    """
    Display statistical information about the tasks performance.
    It is possible to get full information about a specific task
    with a command like this one, for a classical calculation::

      $ oq show task_info:classical
    """
    args = token.split(':')[1:]  # called as task_info:task_name
    if args:
        [task] = args  # fails loudly if more than one task name is given
        array = dstore['task_info/' + task][()]
        # duration normalized by the task weight
        rduration = array['duration'] / array['weight']
        data = util.compose_arrays(rduration, array, 'rduration')
        data.sort(order='duration')
        return rst_table(data)

    # no task name given: show summary statistics for every task type
    data = ['operation-duration mean stddev min max outputs'.split()]
    for task in dstore['task_info']:
        val = dstore['task_info/' + task]['duration']
        if len(val):  # skip tasks without stored durations
            data.append(stats(task, val))
    if len(data) == 1:  # only the header row
        return 'Not available'
    return rst_table(data)
Exemple #7
0
def export_gmf_data_csv(ekey, dstore):
    """
    Export the GMFs: for scenario calculations, one CSV file per
    (GSIM, IMT) pair; for event based, delegate to a GmfExporter.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of exported file names
    """
    oq = dstore['oqparam']
    if 'scenario' in oq.calculation_mode:
        imtls = dstore['oqparam'].imtls
        rlzs = dstore['csm_info'].get_rlzs_assoc().realizations
        gsims = [str(rlz.gsim_rlz) for rlz in rlzs]
        n_gmfs = oq.number_of_ground_motion_fields
        # one column per ground motion field, named 000, 001, ...
        fields = ['%03d' % i for i in range(n_gmfs)]
        dt = numpy.dtype([(f, F32) for f in fields])
        etags, gmfs_ = calc.get_gmfs(dstore)
        sitemesh = get_mesh(dstore['sitecol'])
        writer = writers.CsvWriter(fmt='%.5f')
        for gsim, gmfa in zip(gsims, gmfs_):  # gmfa of shape (N, E)
            for imt in imtls:
                # convert the rows for this IMT into the per-event dtype
                gmfs = numpy.zeros(len(gmfa), dt)
                for i in range(len(gmfa)):
                    gmfs[i] = tuple(gmfa[imt][i])
                dest = dstore.build_fname('gmf', '%s-%s' % (gsim, imt), 'csv')
                data = util.compose_arrays(sitemesh, gmfs)
                writer.save(data, dest)
        return writer.getsaved()
    else:  # event based
        exporter = GmfExporter(dstore)
        # the export key may have the form gmf_data:sm_id:eid
        sm_id, eid = get_sm_id_eid(ekey[0])
        if eid is None:
            logging.info('Exporting only the first event')
            logging.info('Use the command `oq export gmf_data:*:* %d` '
                         'to export everything', dstore.calc_id)
            return exporter.export_one(0, 0)
        elif eid == '*':
            return exporter.export_all()
        else:
            return exporter.export_one(int(sm_id), int(eid))
Exemple #8
0
def export_hazard_hdf5(ekey, dstore):
    """
    Export every dataset under the given datastore key into a single
    HDF5 file, each one joined with the site mesh.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: a list with the name of the exported file
    """
    key, _fmt = ekey
    mesh = get_mesh(dstore['sitecol'])
    fname = dstore.export_path('%s.%s' % ekey)
    with hdf5.File(fname, 'w') as out:
        for dskey, ds in dstore[key].items():
            out['%s/%s' % (key, dskey)] = util.compose_arrays(mesh, ds.value)
    return [fname]
Exemple #9
0
def export_gmf_scenario_npz(ekey, dstore):
    """
    Compute the GMFs on the fly from the stored rupture (if any) and
    export them into a single .npz file, one array per GSIM.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: a list with the name of the exported file, or [] when the
        calculation mode is not a scenario
    """
    oq = dstore['oqparam']
    if 'scenario' not in oq.calculation_mode:
        # logging.warn is a deprecated alias of logging.warning
        logging.warning('GMF export not implemented for %s',
                        oq.calculation_mode)
        return []
    sitemesh = get_mesh(dstore['sitecol'], complete=False)
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    gsims = rlzs_assoc.gsims_by_grp_id[0]  # there is a single grp_id
    E = oq.number_of_ground_motion_fields
    correl_model = oq.get_correl_model()
    computer = gmf.GmfComputer(
            dstore['ruptures/grp-00/0'], dstore['sitecol'], oq.imtls, gsims,
            oq.truncation_level, correl_model)
    fname = dstore.export_path('%s.%s' % ekey)
    # one field per IMT, each holding a vector of E values per site
    gmf_dt = numpy.dtype([(imt, (F32, E)) for imt in oq.imtls])
    imts = list(oq.imtls)
    dic = {}
    for gsim in gsims:
        arr = computer.compute(gsim, E, oq.random_seed)
        # arr has shape (#IMTs, #sites, #events); do not rebind E
        S = arr.shape[1]
        gmfa = numpy.zeros(S, gmf_dt)
        for imti, imt in enumerate(imts):
            gmfa[imt] = arr[imti]
        dic[str(gsim)] = util.compose_arrays(sitemesh, gmfa)
    savez(fname, **dic)
    return [fname]
Exemple #10
0
def export_csq_by_taxon_csv(ekey, dstore):
    """
    Export the consequences per taxonomy, one CSV file per realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of exported file names
    """
    taxonomies = add_quotes(dstore['assetcol/taxonomies'].value)
    realizations = dstore['csm_info'].get_rlzs_assoc().realizations
    matrix = dstore[ekey[0]].value  # matrix T x R
    writer = writers.CsvWriter(fmt=FIVEDIGITS)
    for rlz, column in zip(realizations, matrix.T):
        dest = dstore.build_fname(ekey[0], rlz.gsim_rlz, ekey[1])
        writer.save(compose_arrays(taxonomies, column, 'taxonomy'), dest)
    return writer.getsaved()
Exemple #11
0
def export_loss_maps_csv(ekey, dstore):
    """
    Export the loss maps, one CSV file per realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of exported file names
    """
    realizations = dstore['csm_info'].get_rlzs_assoc().realizations
    assets = get_assets(dstore)
    loss_maps = get_loss_maps(dstore, 'rlzs')
    writer = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    for rlz, column in zip(realizations, loss_maps.T):
        dest = dstore.build_fname('loss_maps', rlz, ekey[1])
        writer.save(compose_arrays(assets, column), dest)
    return writer.getsaved()
Exemple #12
0
def extract_dmg_by_asset_npz(dstore, what):
    """
    Yield pairs ('rlz-XXX', composite array) with the damage
    distributions per asset, one pair per realization.

    :param dstore: datastore object
    :param what: extraction string (unused here)
    """
    damage_dt = build_damage_dt(dstore)
    data = dstore['dmg_by_asset']
    assets = util.get_assets(dstore)
    for rlz in dstore['csm_info'].get_rlzs_assoc().realizations:
        arr = build_damage_array(data[:, rlz.ordinal], damage_dt)
        yield 'rlz-%03d' % rlz.ordinal, util.compose_arrays(assets, arr)
Exemple #13
0
def export_rlzs_by_asset_csv(ekey, dstore):
    """
    Export an asset-level output, one CSV file per realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of exported file names
    """
    realizations = dstore['csm_info'].get_rlzs_assoc().realizations
    assets = get_assets(dstore)
    matrix = dstore[ekey[0]].value  # matrix N x R or T x R
    writer = writers.CsvWriter(fmt=FIVEDIGITS)
    for rlz, column in zip(realizations, matrix.T):
        dest = dstore.build_fname(ekey[0], rlz.gsim_rlz, ekey[1])
        writer.save(compose_arrays(assets, column), dest)
    return writer.getsaved()
Exemple #14
0
def export_uhs_hdf5(ekey, dstore):
    """
    Export the uniform hazard spectra into a single HDF5 file,
    one dataset per kind of hazard curves.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: a list with the name of the exported file
    """
    oq = dstore['oqparam']
    mesh = get_mesh(dstore['sitecol'])
    fname = dstore.export_path('%s.%s' % ekey)
    with hdf5.File(fname, 'w') as out:
        for dskey in dstore['hcurves']:
            uhs = calc.make_uhs(
                dstore['hcurves/%s' % dskey], oq.imtls, oq.poes, len(mesh))
            out['uhs/%s' % dskey] = util.compose_arrays(mesh, uhs)
    return [fname]
Exemple #15
0
def view_slow_ruptures(token, dstore, maxrows=25):
    """
    Show the slowest ruptures

    :param token: view token (unused here)
    :param dstore: datastore object
    :param maxrows: maximum number of rows to show
    """
    fields = ['code', 'n_occ', 'mag', 'trt_smr']
    rups = dstore['ruptures'][()][fields]
    time = dstore['gmf_data/time_by_rup'][()]
    # NOTE(review): 'time' and 'nsites' are assumed to be fields of the
    # time_by_rup array (they are not in `fields` above) — confirm
    arr = util.compose_arrays(rups, time)
    arr = arr[arr['nsites'] > 0]  # discard ruptures affecting no sites
    arr.sort(order='time')
    return arr[-maxrows:]  # the slowest ruptures, slowest last
Exemple #16
0
def view_mean_avg_losses(token, dstore):
    """
    Show the average losses per asset as a rst table, taking the first
    statistic (or the first realization as a fallback).
    """
    try:
        data = dstore['avg_losses-stats'][:, 0]  # shape (N, S)
    except KeyError:
        # no statistics were stored, fall back to the realizations
        data = dstore['avg_losses-rlzs'][:, 0]  # shape (N, R)
    assets = util.get_assets(dstore)
    table = util.compose_arrays(assets, data)
    table.sort()
    return rst_table(table, fmt=FIVEDIGITS)
Exemple #17
0
def export_dmg_by_asset_csv(ekey, dstore):
    """
    Export the damage distributions per asset, one CSV file per
    realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of exported file names
    """
    damage_dt = build_damage_dt(dstore)
    data = dstore[ekey[0]]
    assets = get_assets(dstore)
    writer = writers.CsvWriter(fmt='%.6E')
    for rlz in dstore['csm_info'].get_rlzs_assoc().realizations:
        arr = build_damage_array(data[:, rlz.ordinal], damage_dt)
        dest = dstore.build_fname(ekey[0], rlz, ekey[1])
        writer.save(compose_arrays(assets, arr), dest)
    return writer.getsaved()
Exemple #18
0
def extract_gmf_scenario_npz(dstore, what):
    """
    Yield pairs ('rlz-XXX', composite array) with the GMFs per site,
    one pair per realization.

    :param dstore: datastore object
    :param what: extraction string (unused here)
    """
    oq = dstore['oqparam']
    mesh = get_mesh(dstore['sitecol'])
    num_sites = len(mesh)
    data = dstore['gmf_data/data'][()]
    rlz = dstore['events']['rlz_id']
    for rlzi in sorted(set(rlz)):
        ok = rlz[data['eid']] == rlzi  # rows belonging to this rlz
        gmfa = _gmf_scenario(data[ok], num_sites, oq.imtls)
        logging.info('Exporting array%s for rlz#%d', gmfa.shape, rlzi)
        yield 'rlz-%03d' % rlzi, util.compose_arrays(mesh, gmfa)
Exemple #19
0
def view_mean_avg_losses(token, dstore):
    """
    Show the average losses per asset as a rst table, taking the first
    statistic (or the first realization as a fallback).
    """
    try:
        array = dstore['avg_losses-stats']  # shape (N, S)
        data = array[:, 0]
    except KeyError:
        # no statistics were stored, fall back to the realizations
        array = dstore['avg_losses-rlzs']  # shape (N, R)
        data = array[:, 0]
    assets = util.get_assets(dstore)
    losses = util.compose_arrays(assets, data)
    losses.sort()
    return rst_table(losses, fmt='%8.6E')
Exemple #20
0
def export_csq_by_taxon_csv(ekey, dstore):
    """
    Export the consequences per taxonomy, one CSV file per realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of exported file names
    """
    taxonomies = dstore['assetcol/taxonomies'].value
    rlzs = dstore['csm_info'].get_rlzs_assoc().realizations
    R = len(rlzs)
    value = dstore[ekey[0]].value  # matrix T x R
    writer = writers.CsvWriter(fmt='%9.6E')
    for rlz, values in zip(rlzs, value.T):
        # no gsim suffix when there is a single realization
        suffix = '.csv' if R == 1 else '-gsimltp_%s.csv' % rlz.uid
        fname = dstore.export_path(ekey[0] + suffix)
        writer.save(compose_arrays(taxonomies, values, 'taxonomy'), fname)
    return writer.getsaved()
Exemple #21
0
def export_rlzs_by_asset_csv(ekey, dstore):
    """
    Export an asset-level output, one CSV file per realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of exported file names
    """
    rlzs = dstore['csm_info'].get_rlzs_assoc().realizations
    assets = get_assets(dstore)
    R = len(rlzs)
    value = dstore[ekey[0]].value  # matrix N x R or T x R
    writer = writers.CsvWriter(fmt='%9.6E')
    for rlz, values in zip(rlzs, value.T):
        # no gsim suffix when there is a single realization
        suffix = '.csv' if R == 1 else '-gsimltp_%s.csv' % rlz.uid
        fname = dstore.export_path(ekey[0] + suffix)
        writer.save(compose_arrays(assets, values), fname)
    return writer.getsaved()
Exemple #22
0
def extract_gmf_scenario_npz(dstore, what):
    """
    Yield pairs ('rlz-XXX', composite array) with the GMFs per site,
    one pair per realization.

    :param dstore: datastore object
    :param what: extraction string (unused here)
    """
    oq = dstore['oqparam']
    mesh = get_mesh(dstore['sitecol'])
    n = len(mesh)
    data = dstore['gmf_data/data'][()]
    rlz = dstore['events']['rlz']
    for rlzi in sorted(set(rlz)):
        # select the rows of data belonging to the events of this rlz
        idx = rlz[data['eid']] == rlzi
        gmfa = _gmf_scenario(data[idx], n, oq.imtls)
        logging.info('Exporting array%s for rlz#%d', gmfa.shape, rlzi)
        yield 'rlz-%03d' % rlzi, util.compose_arrays(mesh, gmfa)
Exemple #23
0
def export_rcurves(ekey, dstore):
    """
    Export the rcurves per asset, one CSV file per realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of exported file names
    """
    assets = get_assets(dstore)
    curves = compactify(dstore[ekey[0]].value)
    prefix = ekey[0].split('-')[0]  # part before the first dash
    writer = writers.CsvWriter(fmt=FIVEDIGITS)
    for rlz in dstore['csm_info'].get_rlzs_assoc().realizations:
        dest = dstore.build_fname(prefix, rlz, 'csv')
        writer.save(compose_arrays(assets, curves[:, rlz.ordinal]), dest)
    return writer.getsaved()
Exemple #24
0
def export_rcurves(ekey, dstore):
    """
    Export the rcurves per asset, one CSV file per realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of exported file names
    """
    rlzs = dstore['csm_info'].get_rlzs_assoc().realizations
    assets = get_assets(dstore)
    curves = compactify(dstore[ekey[0]].value)
    name = ekey[0].split('-')[0]  # part before the first dash
    writer = writers.CsvWriter(fmt='%9.7E')
    for rlz in rlzs:
        array = compose_arrays(assets, curves[:, rlz.ordinal])
        path = dstore.export_path('%s-%s.csv' % (name, rlz.uid))
        writer.save(array, path)
    return writer.getsaved()
Exemple #25
0
def view_mean_avg_losses(token, dstore):
    """
    Show the average losses per asset as a rst table, converting the
    rows into the composite multiloss dtype.
    """
    dt = dstore['oqparam'].multiloss_dt()
    try:
        array = dstore['avg_losses-stats']  # shape (N, S)
    except KeyError:
        # no statistics were stored, fall back to the realizations
        array = dstore['avg_losses-rlzs']  # shape (N, R)
    # take the first statistic/realization, convert to composite dtype
    data = numpy.array([tuple(row) for row in array[:, 0]], dt)
    assets = util.get_assets(dstore)
    losses = util.compose_arrays(assets, data)
    losses.sort()
    return rst_table(losses, fmt=FIVEDIGITS)
Exemple #26
0
def export_rcurves(ekey, dstore):
    """
    Export the rcurves per asset, one CSV file per realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of exported file names
    """
    rlzs = dstore['csm_info'].get_rlzs_assoc().realizations
    assets = get_assets(dstore)
    curves = compactify(dstore[ekey[0]].value)
    name = ekey[0].split('-')[0]  # part before the first dash
    writer = writers.CsvWriter(fmt=FIVEDIGITS)
    for rlz in rlzs:
        array = compose_arrays(assets, curves[:, rlz.ordinal])
        path = dstore.export_path('%s-%s.csv' % (name, rlz.uid))
        writer.save(array, path)
    return writer.getsaved()
Exemple #27
0
def export_hcurves_hdf5(ekey, dstore):
    """
    Export the hazard curves into a single HDF5 file, together with
    the intensity measure levels.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: a list with the name of the exported file
    """
    key, _fmt = ekey
    mesh = get_mesh(dstore['sitecol'])
    imtls = dstore['oqparam'].imtls
    fname = dstore.export_path('%s.%s' % ekey)
    with hdf5.File(fname, 'w') as out:
        out['imtls'] = imtls
        for dskey in dstore[key]:
            converted = dstore['%s/%s' % (key, dskey)].convert(
                imtls, len(mesh))
            out['%s/%s' % (key, dskey)] = util.compose_arrays(
                mesh, converted)
    return [fname]
Exemple #28
0
def export_dmg_by_asset_csv(ekey, dstore):
    """
    Export the damage distributions per asset, one CSV file per
    realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of exported file names
    """
    damage_dt = build_damage_dt(dstore)
    rlzs = dstore['csm_info'].get_rlzs_assoc().realizations
    data = dstore[ekey[0]]
    writer = writers.CsvWriter(fmt='%.6E')
    assets = get_assets(dstore)
    for rlz in rlzs:
        dmg_by_asset = build_damage_array(data[:, rlz.ordinal], damage_dt)
        fname = dstore.build_fname(ekey[0], rlz.gsim_rlz, ekey[1])
        writer.save(compose_arrays(assets, dmg_by_asset), fname)
    return writer.getsaved()
Exemple #29
0
def export_rlzs_by_asset_csv(ekey, dstore):
    """
    Export an asset-level output, one CSV file per realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of exported file names
    """
    rlzs = dstore['csm_info'].get_rlzs_assoc().realizations
    assets = get_assets(dstore)
    R = len(rlzs)
    value = dstore[ekey[0]].value  # matrix N x R or T x R
    writer = writers.CsvWriter(fmt='%9.6E')
    for rlz, values in zip(rlzs, value.T):
        # no gsim suffix when there is a single realization
        suffix = '.csv' if R == 1 else '-gsimltp_%s.csv' % rlz.uid
        fname = dstore.export_path(ekey[0] + suffix)
        writer.save(compose_arrays(assets, values), fname)
    return writer.getsaved()
Exemple #30
0
def export_csq_by_taxon_csv(ekey, dstore):
    """
    Export the consequences per taxonomy, one CSV file per realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of exported file names
    """
    taxonomies = dstore['assetcol/taxonomies'].value
    rlzs = dstore['csm_info'].get_rlzs_assoc().realizations
    R = len(rlzs)
    value = dstore[ekey[0]].value  # matrix T x R
    writer = writers.CsvWriter(fmt='%9.6E')
    for rlz, values in zip(rlzs, value.T):
        # no gsim suffix when there is a single realization
        suffix = '.csv' if R == 1 else '-gsimltp_%s.csv' % rlz.uid
        fname = dstore.export_path(ekey[0] + suffix)
        writer.save(compose_arrays(taxonomies, values, 'taxonomy'), fname)
    return writer.getsaved()
Exemple #31
0
def export_uhs_npz(ekey, dstore):
    """
    Export the uniform hazard spectra into a single .npz file,
    one array per kind of hazard curves.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: a list with the name of the exported file
    """
    oq = dstore['oqparam']
    mesh = get_mesh(dstore['sitecol'])
    fname = dstore.export_path('%s.%s' % ekey)
    arrays = {}
    for dskey in dstore['hcurves']:
        uhs = calc.make_uhs(
            dstore['hcurves/%s' % dskey], oq.imtls, oq.poes, len(mesh))
        arrays[dskey] = util.compose_arrays(mesh, uhs)
    savez(fname, **arrays)
    return [fname]
Exemple #32
0
def view_portfolio_losses(token, dstore):
    """
    The losses for the full portfolio, for each realization and loss type,
    extracted from the event loss table.
    """
    loss_dt = dstore['oqparam'].loss_dt()
    data = portfolio_loss(dstore).view(loss_dt)[:, 0]
    # label each row with the ordinal of its realization
    labels = numpy.array([str(r) for r in range(len(data))])
    array = util.compose_arrays(labels, data, 'rlz')
    # this is very sensitive to rounding errors, so I am using a low precision
    return rst_table(array, fmt='%.5E')
Exemple #33
0
def export_dmg_by_taxon_csv(ekey, dstore):
    """
    Export the damage distributions per taxonomy, one CSV file per
    realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of exported file names
    """
    damage_dt = build_damage_dt(dstore)
    taxonomies = add_quotes(dstore['assetcol/taxonomies'].value)
    data = dstore[ekey[0]]
    writer = writers.CsvWriter(fmt='%.6E')
    for rlz in dstore['csm_info'].get_rlzs_assoc().realizations:
        arr = build_damage_array(data[:, rlz.ordinal], damage_dt)
        dest = dstore.build_fname(ekey[0], rlz.gsim_rlz, ekey[1])
        writer.save(compose_arrays(taxonomies, arr, 'taxonomy'), dest)
    return writer.getsaved()
Exemple #34
0
def prepare_site_model(exposure_xml,
                       vs30_csv,
                       grid_spacing=0,
                       site_param_distance=5,
                       output='sites.csv'):
    """
    Prepare a site_model.csv file from an exposure xml file, a vs30 csv file
    and a grid spacing which can be 0 (meaning no grid). Sites far away from
    the vs30 records are discarded and you can see them with the command
    `oq plot_assets`. It is up to you decide if you need to fix your exposure
    or if it is right to ignore the discarded sites.

    :param exposure_xml: path to an exposure XML file
    :param vs30_csv: comma-separated paths to vs30 CSV files
    :param grid_spacing: spacing of the hazard grid (0 means no grid)
    :param site_param_distance: maximum distance for the vs30 association
    :param output: name of the CSV file to write
    :returns: the computed SiteCollection
    """
    logging.basicConfig(level=logging.INFO)
    # NB: the local name `hdf5` is a new datastore file, not the hdf5 module
    hdf5 = datastore.hdf5new()
    with performance.Monitor(hdf5.path, hdf5, measuremem=True) as mon:
        mesh, assets_by_site = Exposure.read(
            exposure_xml, check_dupl=False).get_mesh_assets_by_site()
        mon.hdf5['assetcol'] = assetcol = site.SiteCollection.from_points(
            mesh.lons, mesh.lats, req_site_params={'vs30'})
        if grid_spacing:
            # discretize the dilated convex hull of the exposure, then
            # keep only the grid points close to assets
            grid = mesh.get_convex_hull().dilate(grid_spacing).discretize(
                grid_spacing)
            haz_sitecol = site.SiteCollection.from_points(
                grid.lons, grid.lats, req_site_params={'vs30'})
            logging.info(
                'Reducing exposure grid with %d locations to %d sites'
                ' with assets', len(haz_sitecol), len(assets_by_site))
            haz_sitecol, assets_by, _discarded = assoc(assets_by_site,
                                                       haz_sitecol,
                                                       grid_spacing * SQRT2,
                                                       'filter')
            haz_sitecol.make_complete()
        else:
            haz_sitecol = assetcol
        vs30orig = read_vs30(vs30_csv.split(','))
        logging.info('Associating %d hazard sites to %d site parameters',
                     len(haz_sitecol), len(vs30orig))
        # associate each hazard site to the closest vs30 record within
        # the association distance; far-away sites end up in `discarded`
        sitecol, vs30, discarded = assoc(
            vs30orig, haz_sitecol, grid_spacing * SQRT2 or site_param_distance,
            'filter')
        sitecol.array['vs30'] = vs30['vs30']
        mon.hdf5['sitecol'] = sitecol
        if discarded:
            mon.hdf5['discarded'] = numpy.array(discarded)
        sids = numpy.arange(len(vs30), dtype=numpy.uint32)
        sites = compose_arrays(sids, vs30, 'site_id')
        write_csv(output, sites)
    if discarded:
        logging.info('Discarded %d sites with assets [use oq plot_assets]',
                     len(discarded))
    logging.info('Saved %d rows in %s' % (len(sitecol), output))
    logging.info(mon)
    return sitecol
Exemple #35
0
def export_dmg_by_asset_csv(ekey, dstore):
    """
    Export the damage distributions per asset, one CSV file per
    realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of exported file names
    """
    damage_dt = build_damage_dt(dstore)
    rlzs = dstore['rlzs_assoc'].realizations
    data = dstore[ekey[0]]
    writer = writers.CsvWriter(fmt='%.6E')
    assets = get_assets(dstore)
    for rlz in rlzs:
        gsim, = rlz.value  # exactly one gsim per realization is expected
        dmg_by_asset = build_damage_array(data[:, rlz.ordinal], damage_dt)
        fname = dstore.export_path('%s-%s.%s' % (ekey[0], gsim, ekey[1]))
        writer.save(compose_arrays(assets, dmg_by_asset), fname)
    return writer.getsaved()
Exemple #36
0
def export_dmg_by_asset_npz(ekey, dstore):
    """
    Export the damage distributions per asset into a single .npz file,
    one array per realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: a list with the name of the exported file
    """
    damage_dt = build_damage_dt(dstore)
    data = dstore[ekey[0]]
    assets = get_assets(dstore)
    arrays = {}
    for rlz in dstore['csm_info'].get_rlzs_assoc().realizations:
        arr = build_damage_array(data[:, rlz.ordinal], damage_dt)
        arrays['rlz-%03d' % rlz.ordinal] = compose_arrays(assets, arr)
    fname = dstore.export_path('%s.%s' % ekey)
    savez(fname, **arrays)
    return [fname]
Exemple #37
0
def view_portfolio_losses(token, dstore):
    """
    The losses for the full portfolio, for each realization and loss type,
    extracted from the event loss table.
    """
    oq = dstore['oqparam']
    loss_dt = oq.loss_dt()
    data = _portfolio_loss(dstore).view(loss_dt)[:, 0]  # shape R
    # label each row with the ordinal of its realization
    rlzids = [str(r) for r in range(len(data))]
    array = util.compose_arrays(numpy.array(rlzids), data, 'rlz_id')
    # this is very sensitive to rounding errors, so I am using a low precision
    return text_table(array, fmt='%.5E')
Exemple #38
0
def export_dmg_by_taxon_csv(ekey, dstore):
    """
    Export the damage distributions per taxonomy, one CSV file per
    realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of exported file names
    """
    damage_dt = build_damage_dt(dstore)
    taxonomies = add_quotes(dstore['assetcol/taxonomies'].value)
    rlzs = dstore['csm_info'].get_rlzs_assoc().realizations
    data = dstore[ekey[0]]
    writer = writers.CsvWriter(fmt='%.6E')
    for rlz in rlzs:
        dmg_by_taxon = build_damage_array(data[:, rlz.ordinal], damage_dt)
        fname = dstore.build_fname(ekey[0], rlz, ekey[1])
        array = compose_arrays(taxonomies, dmg_by_taxon, 'taxonomy')
        writer.save(array, fname)
    return writer.getsaved()
Exemple #39
0
def view_mean_avg_losses(token, dstore):
    """
    Show the average losses per asset as a rst table; with a single
    realization they come from avg_losses-rlzs, otherwise from the
    first statistic in avg_losses-stats.
    """
    oq = dstore['oqparam']
    num_rlzs = dstore['csm_info'].get_num_rlzs()
    if num_rlzs == 1:  # one realization
        mean = dstore['avg_losses-rlzs'][:, 0]
    else:
        mean = dstore['avg_losses-stats'][:, 0]
    # convert the rows into the composite loss dtype
    rows = [tuple(row) for row in mean]
    data = numpy.array(rows, oq.loss_dt())
    assets = util.get_assets(dstore)
    losses = util.compose_arrays(assets, data)
    losses.sort()
    return rst_table(losses, fmt=FIVEDIGITS)
Exemple #40
0
def export_all_losses_npz(ekey, dstore):
    """
    Export all the losses into a single .npz file, one array per
    realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: a list with the name of the exported file
    """
    assets = get_assets(dstore)
    losses = dstore['all_losses-rlzs']
    arrays = {}
    for rlz in dstore['csm_info'].get_rlzs_assoc().realizations:
        compacted = _compact(losses[:, :, rlz.ordinal])
        arrays['all_losses-%03d' % rlz.ordinal] = compose_arrays(
            assets, compacted)
    fname = dstore.build_fname('all_losses', 'rlzs', 'npz')
    savez(fname, **arrays)
    return [fname]
Exemple #41
0
def export_dmg_by_asset_csv(ekey, dstore):
    """
    Export the damage distributions per asset, one CSV file per
    realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of exported file names
    """
    damage_dt = build_damage_dt(dstore)
    rlzs = dstore['rlzs_assoc'].realizations
    data = dstore[ekey[0]]
    writer = writers.CsvWriter(fmt='%.6E')
    assets = get_assets(dstore)
    for rlz in rlzs:
        gsim, = rlz.value  # exactly one gsim per realization is expected
        dmg_by_asset = build_damage_array(data[:, rlz.ordinal], damage_dt)
        fname = dstore.export_path('%s-%s.%s' % (ekey[0], gsim, ekey[1]))
        writer.save(compose_arrays(assets, dmg_by_asset), fname)
    return writer.getsaved()
Exemple #42
0
 def save_events(self, rup_array):
     """
     Build the `events` array from the ruptures, associate each event
     to a realization and store events and rupture event indices in
     the datastore.

     :param rup_array: an array of ruptures with fields et_id
     """
     from openquake.calculators.getters import (get_eid_rlz,
                                                gen_rupture_getters)
     # this is very fast compared to saving the ruptures
     E = rup_array['n_occ'].sum()
     self.check_overflow(E)  # check the number of events
     events = numpy.zeros(E, rupture.events_dt)
     # when computing the events all ruptures must be considered,
     # including the ones far away that will be discarded later on
     rgetters = gen_rupture_getters(self.datastore,
                                    self.oqparam.concurrent_tasks)
     # build the associations eid -> rlz sequentially or in parallel
     # this is very fast: I saw 30 million events associated in 1 minute!
     logging.info('Associating event_id -> rlz_id for {:_d} events '
                  'and {:_d} ruptures'.format(len(events), len(rup_array)))
     iterargs = ((rg.proxies, rg.rlzs_by_gsim) for rg in rgetters)
     # below 100_000 events the parallelization overhead is not worth it
     if len(events) < 1E5:
         it = itertools.starmap(get_eid_rlz, iterargs)
     else:
         it = parallel.Starmap(get_eid_rlz,
                               iterargs,
                               progress=logging.debug,
                               h5=self.datastore.hdf5)
     i = 0
     for eid_rlz in it:
         for er in eid_rlz:
             events[i] = er
             i += 1
             if i >= TWO32:
                 # event IDs must fit in 32 bits
                 raise ValueError('There are more than %d events!' % i)
     events.sort(order='rup_id')  # fast too
     # sanity check
     n_unique_events = len(numpy.unique(events[['id', 'rup_id']]))
     assert n_unique_events == len(events), (n_unique_events, len(events))
     events['id'] = numpy.arange(len(events))
     # set event year and event ses starting from 1
     nses = self.oqparam.ses_per_logic_tree_path
     extra = numpy.zeros(len(events), [('year', U32), ('ses_id', U32)])
     numpy.random.seed(self.oqparam.ses_seed)
     if self.oqparam.investigation_time:
         itime = int(self.oqparam.investigation_time)
         extra['year'] = numpy.random.choice(itime, len(events)) + 1
     extra['ses_id'] = numpy.random.choice(nses, len(events)) + 1
     self.datastore['events'] = util.compose_arrays(events, extra)
     # store the first/last event index for each rupture
     eindices = get_indices(events['rup_id'])
     arr = numpy.array(list(eindices.values()))[:, 0, :]
     self.datastore['ruptures']['e0'] = arr[:, 0]
     self.datastore['ruptures']['e1'] = arr[:, 1]
Exemple #43
0
def export_loss_curves(ekey, dstore):
    """
    Export the loss curves per asset, one CSV file per
    (loss type, realization) pair.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of exported file names
    """
    loss_types = dstore.get_attr('composite_risk_model', 'loss_types')
    assets = get_assets(dstore)
    curves = dstore[ekey[0]]
    prefix = ekey[0].split('-')[0]  # part before the first dash
    writer = writers.CsvWriter(fmt='%9.6E')
    for rlz in dstore['csm_info'].get_rlzs_assoc().realizations:
        for ltype in loss_types:
            dest = dstore.export_path(
                '%s-%s-%s.csv' % (prefix, ltype, rlz.uid))
            writer.save(
                compose_arrays(assets, curves[ltype][:, rlz.ordinal]), dest)
    return writer.getsaved()
Exemple #44
0
def export_hmaps_csv(key, dest, sitemesh, array, comment):
    """
    Export the hazard maps of the given realization into CSV.

    :param key: output_type and export_type
    :param dest: name of the exported file
    :param sitemesh: site collection
    :param array: a composite array of dtype hmap_dt
    :param comment: comment to use as header of the exported CSV file
    :returns: a list containing the name of the exported file
    """
    composite = util.compose_arrays(sitemesh, array)
    writers.write_csv(dest, composite, comment=comment)
    return [dest]
Exemple #45
0
def export_hmaps_csv(key, dest, sitemesh, array, comment):
    """
    Export the hazard maps of the given realization into CSV.

    :param key: output_type and export_type
    :param dest: name of the exported file
    :param sitemesh: site collection
    :param array: a composite array of dtype hmap_dt
    :param comment: comment to use as header of the exported CSV file
    :returns: a list containing the name of the exported file
    """
    curves = util.compose_arrays(sitemesh, array)
    writers.write_csv(dest, curves, comment=comment)
    return [dest]
Exemple #46
0
def export_dmg_by_taxon_csv(ekey, dstore):
    """
    Export one CSV file per realization with the damage distribution
    per taxonomy.
    """
    dmg_dt = build_damage_dt(dstore)
    taxonomies = dstore['assetcol/taxonomies'].value
    writer = writers.CsvWriter(fmt='%.6E')
    data = dstore[ekey[0]]
    for rlz in dstore['rlzs_assoc'].realizations:
        [gsim] = rlz.value
        dmg_array = build_damage_array(data[:, rlz.ordinal], dmg_dt)
        out = compose_arrays(taxonomies, dmg_array, 'taxonomy')
        fname = dstore.export_path('%s-%s.%s' % (ekey[0], gsim, ekey[1]))
        writer.save(out, fname)
    return writer.getsaved()
Exemple #47
0
def export_hcurves_npz(ekey, dstore):
    """
    Export all hazard curves, plus the intensity measure levels,
    into a single .npz file.
    """
    mesh = get_mesh(dstore['sitecol'])
    imtls = dstore['oqparam'].imtls
    fname = dstore.export_path('%s.%s' % ekey)
    levels = numpy.zeros(1, imtls.dt)
    for imt in imtls:
        levels[imt] = imtls[imt]
    out = {'imtls': levels[0]}
    for kind, hcurves in calc.PmapGetter(dstore).items():
        converted = hcurves.convert(imtls, len(mesh))
        out[kind] = util.compose_arrays(mesh, converted)
    savez(fname, **out)
    return [fname]
Exemple #48
0
def extract_losses_by_asset(dstore, what):
    """
    Yield (label, composite array of assets + average losses) pairs,
    one per realization or statistic, depending on which datasets
    are present in the datastore.
    """
    loss_dt = dstore['oqparam'].loss_dt()
    rlzs = dstore['full_lt'].get_realizations()
    assets = util.get_assets(dstore)
    if 'losses_by_asset' in dstore:
        lba = dstore['losses_by_asset'][()]
        for rlz in rlzs:
            # only the 'mean' field is exported; 'stddev' is dropped
            mean = cast(lba[:, rlz.ordinal]['mean'], loss_dt)
            yield 'rlz-%03d' % rlz.ordinal, util.compose_arrays(assets, mean)
    elif 'avg_losses-stats' in dstore:
        aw = hdf5.ArrayWrapper.from_(dstore['avg_losses-stats'])
        for s, stat in enumerate(aw.stat):
            yield stat, util.compose_arrays(assets, cast(aw[:, s], loss_dt))
    elif 'avg_losses-rlzs' in dstore:  # there is only one realization
        avg = cast(dstore['avg_losses-rlzs'][()], loss_dt)
        yield 'rlz-000', util.compose_arrays(assets, avg)
Exemple #49
0
def export_dmg_by_taxon_csv(ekey, dstore):
    """
    Export the per-taxonomy damage distribution, one CSV file per
    realization.
    """
    damage_dt = build_damage_dt(dstore)
    taxonomies = dstore['assetcol/taxonomies'].value
    writer = writers.CsvWriter(fmt='%.6E')
    raw = dstore[ekey[0]]
    for rlz in dstore['rlzs_assoc'].realizations:
        gsim, = rlz.value
        damages = build_damage_array(raw[:, rlz.ordinal], damage_dt)
        table = compose_arrays(taxonomies, damages, 'taxonomy')
        path = dstore.export_path('%s-%s.%s' % (ekey[0], gsim, ekey[1]))
        writer.save(table, path)
    return writer.getsaved()
Exemple #50
0
def export_loss_curves(ekey, dstore):
    """
    Export the asset loss curves, one CSV file per (loss type,
    realization) pair.
    """
    realizations = dstore['rlzs_assoc'].realizations
    ltypes = dstore.get_attr('composite_risk_model', 'loss_types')
    asset_array = get_assets(dstore)
    curves = dstore[ekey[0]]
    prefix = ekey[0].split('-')[0]
    writer = writers.CsvWriter(fmt='%9.6E')
    for rlz in realizations:
        for ltype in ltypes:
            table = compose_arrays(
                asset_array, curves[ltype][:, rlz.ordinal])
            path = dstore.export_path(
                '%s-%s-%s.csv' % (prefix, ltype, rlz.uid))
            writer.save(table, path)
    return writer.getsaved()
Exemple #51
0
def view_mean_avg_losses(token, dstore):
    """
    Return an rst table with the weighted mean of the average losses
    per asset, sorted.
    """
    loss_dt = dstore['oqparam'].loss_dt()
    weights = dstore['realizations']['weight']
    avg = dstore['avg_losses-rlzs'].value  # shape (N, R)
    if len(weights) == 1:  # single realization: nothing to average
        mean = avg[:, 0]
    else:
        mean = hstats.compute_stats2(
            avg, [hstats.mean_curve], weights)[:, 0]
    records = numpy.array([tuple(row) for row in mean], loss_dt)
    table = util.compose_arrays(util.get_assets(dstore), records)
    table.sort()
    return rst_table(table, fmt=FIVEDIGITS)
Exemple #52
0
def extract_gmf_npz(dstore, what):
    """
    Yield (label, composite array of mesh + GMFs) pairs, one per
    realization, or a single pair when an event_id is passed in `what`.
    """
    oq = dstore['oqparam']
    [eid] = parse(what).get('event_id', [None])
    mesh = get_mesh(dstore['sitecol'])
    nsites = len(mesh)
    data = dstore['gmf_data/data']
    if eid is None:  # export all events, grouped by realization
        rlz_by_event = dstore['events']['rlz_id']
        for rlzi in sorted(set(rlz_by_event)):
            ok = rlz_by_event[data['eid']] == rlzi
            gmfa = _gmf(data[ok], nsites, oq.imtls)
            logging.info('Exporting array%s for rlz#%d', gmfa.shape, rlzi)
            yield 'rlz-%03d' % rlzi, util.compose_arrays(mesh, gmfa)
    else:  # export a single event
        rlzi = dstore['events'][eid]['rlz_id']
        ok = data['eid'] == eid
        if ok.any():
            gmfa = _gmf(data[ok], nsites, oq.imtls)
            yield 'rlz-%03d' % rlzi, util.compose_arrays(mesh, gmfa)
        else:  # no ground motion recorded for this event
            yield 'rlz-%03d' % rlzi, []
Exemple #53
0
def export_gmf_scenario_csv(ekey, dstore):
    """
    Export the GMFs of a single rupture (key of the form
    ``<dskey>/rup-XXX``) into one CSV file per (realization, IMT) pair.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of exported file names (empty if the rupture
        is not found)
    """
    what = ekey[0].split('/')
    if len(what) == 1:
        raise ValueError(r'Missing "/rup-\d+"')
    oq = dstore['oqparam']
    csm_info = dstore['csm_info']
    rlzs_assoc = csm_info.get_rlzs_assoc()
    samples = csm_info.get_samples_by_grp()
    imts = list(oq.imtls)
    # raw strings: '\d' in a non-raw literal is an invalid escape
    # sequence (DeprecationWarning since 3.6, SyntaxWarning since 3.12)
    mo = re.match(r'rup-(\d+)$', what[1])
    if mo is None:
        raise ValueError(r"Invalid format: %r does not match 'rup-(\d+)$'" %
                         what[1])
    rup_id = int(mo.group(1))
    grp_ids = sorted(int(grp[4:]) for grp in dstore['ruptures'])
    events = dstore['events']
    ruptures = list(calc._get_ruptures(dstore, events, grp_ids, rup_id))
    if not ruptures:
        # logging.warn is a deprecated alias of logging.warning
        logging.warning('There is no rupture %d', rup_id)
        return []
    [ebr] = ruptures
    rlzs_by_gsim = rlzs_assoc.get_rlzs_by_gsim(ebr.grp_id)
    samples = samples[ebr.grp_id]
    min_iml = calc.fix_minimum_intensity(oq.minimum_intensity, imts)
    correl_model = oq.get_correl_model()
    sitecol = dstore['sitecol'].complete
    getter = GmfGetter(rlzs_by_gsim, ruptures, sitecol, imts, min_iml,
                       oq.truncation_level, correl_model, samples)
    getter.init()
    hazardr = getter.get_hazard()
    rlzs = rlzs_assoc.realizations
    # one column per event of the rupture
    fields = ['eid-%03d' % eid for eid in getter.eids]
    dt = numpy.dtype([(f, F32) for f in fields])
    mesh = numpy.zeros(len(ebr.sids), [('lon', F64), ('lat', F64)])
    mesh['lon'] = sitecol.lons[ebr.sids]
    mesh['lat'] = sitecol.lats[ebr.sids]
    writer = writers.CsvWriter(fmt='%.5f')
    for rlzi in range(len(rlzs)):
        hazard = hazardr[rlzi]
        for imti, imt in enumerate(imts):
            gmfs = numpy.zeros(len(ebr.sids), dt)
            for s, sid in enumerate(ebr.sids):
                for rec in hazard[sid]:
                    event = 'eid-%03d' % rec['eid']
                    gmfs[s][event] = rec['gmv'][imti]
            dest = dstore.build_fname('gmf',
                                      'rup-%s-rlz-%s-%s' % (rup_id, rlzi, imt),
                                      'csv')
            data = util.compose_arrays(mesh, gmfs)
            writer.save(data, dest)
    return writer.getsaved()
Exemple #54
0
def extract_losses_by_asset(dstore, what):
    """
    Yield (label, assets + average losses) pairs, one per realization
    or statistic, depending on which datasets the datastore contains.
    """
    loss_dt = dstore['oqparam'].loss_dt()
    rlzs = dstore['csm_info'].get_rlzs_assoc().realizations
    assets = util.get_assets(dstore)
    if 'losses_by_asset' in dstore:
        lba = dstore['losses_by_asset'][()]
        for rlz in rlzs:
            # export the 'mean' field only; the 'stddev' is dropped
            mean = cast(lba[:, rlz.ordinal]['mean'], loss_dt)
            yield 'rlz-%03d' % rlz.ordinal, util.compose_arrays(assets, mean)
    elif 'avg_losses-stats' in dstore:
        avg = dstore['avg_losses-stats'][()]
        stat_names = decode(dstore['avg_losses-stats'].attrs['stats'])
        for s, stat in enumerate(stat_names):
            yield stat, util.compose_arrays(
                assets, cast(avg[:, s], loss_dt))
    elif 'avg_losses-rlzs' in dstore:  # there is only one realization
        losses = cast(dstore['avg_losses-rlzs'][()], loss_dt)
        yield 'rlz-000', util.compose_arrays(assets, losses)
Exemple #55
0
def export_loss_maps_csv(ekey, dstore):
    """
    Export one loss-map CSV file per realization or per statistic,
    depending on the datastore key.
    """
    kind = ekey[0].split('-')[1]  # 'rlzs' or 'stats'
    assets = get_assets(dstore)
    loss_maps = get_loss_maps(dstore, kind)
    if kind == 'rlzs':
        tags = dstore['csm_info'].get_rlzs_assoc().realizations
    else:
        quantiles = dstore['oqparam'].quantile_loss_curves
        tags = ['mean'] + ['quantile-%s' % q for q in quantiles]
    writer = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    for tag, column in zip(tags, loss_maps.T):
        fname = dstore.build_fname('loss_maps', tag, ekey[1])
        writer.save(compose_arrays(assets, column), fname)
    return writer.getsaved()
Exemple #56
0
def export_damages_csv(ekey, dstore):
    """
    Export one damages CSV file per realization (or per statistic,
    for keys ending in 'stats').
    """
    rlzs = dstore['csm_info'].get_rlzs_assoc().realizations
    oq = dstore['oqparam']
    assets = get_assets(dstore)
    matrix = dstore[ekey[0]].value  # matrix N x R or T x R
    writer = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    if ekey[0].endswith('stats'):
        tags = ['mean'] + [
            'quantile-%s' % q for q in oq.quantile_loss_curves]
    else:
        tags = ['rlz-%03d' % r for r in range(len(rlzs))]
    for tag, column in zip(tags, matrix.T):
        fname = dstore.build_fname('damages', tag, ekey[1])
        writer.save(compose_arrays(assets, column), fname)
    return writer.getsaved()
Exemple #57
0
def export_gmf_data_csv(ekey, dstore):
    """
    Export the ground motion fields into CSV.

    :param ekey: export key, i.e. a pair (datastore key, fmt); a key of
        the form '<dskey>/<eid>' selects a single event
    :param dstore: datastore object
    :returns: the list of exported file names
    """
    oq = dstore['oqparam']
    rlzs = dstore['full_lt'].get_realizations()
    imts = list(oq.imtls)
    sc = dstore['sitecol'].array
    arr = sc[['lon', 'lat']]
    # a trailing '/<eid>' in the key means export that event only
    eid = int(ekey[0].split('/')[1]) if '/' in ekey[0] else None
    gmfa = numpy.zeros(len(dstore['gmf_data/eid']), oq.gmf_data_dt())
    df = dstore.read_df('gmf_data', 'sid')
    gmfa['eid'] = df.eid.to_numpy()
    gmfa['sid'] = df.index.to_numpy()
    # copy one dataframe column per intensity measure type
    for m in range(len(imts)):
        gmfa['gmv'][:, m] = df[f'gmv_{m}'].to_numpy()
    # remap event indices to the true event IDs
    event_id = dstore['events']['id']
    gmfa['eid'] = event_id[gmfa['eid']]
    if eid is None:  # we cannot use extract here
        # export the site mesh plus the full GMF table
        f = dstore.build_fname('sitemesh', '', 'csv')
        sids = numpy.arange(len(arr), dtype=U32)
        sites = util.compose_arrays(sids, arr, 'site_id')
        writers.write_csv(f, sites)
        fname = dstore.build_fname('gmf', 'data', 'csv')
        gmfa.sort(order=['eid', 'sid'])
        writers.write_csv(fname,
                          _expand_gmv(gmfa, imts),
                          renamedict={
                              'sid': 'site_id',
                              'eid': 'event_id'
                          })
        if 'sigma_epsilon' in dstore['gmf_data']:
            # also export the sigma/epsilon table, sorted by event ID
            sig_eps_csv = dstore.build_fname('sigma_epsilon', '', 'csv')
            sig_eps = dstore['gmf_data/sigma_epsilon'][()]
            sig_eps['eid'] = event_id[sig_eps['eid']]
            sig_eps.sort(order='eid')
            header = list(sig_eps.dtype.names)
            header[0] = 'event_id'
            writers.write_csv(sig_eps_csv, sig_eps, header=header)
            return [fname, sig_eps_csv, f]
        else:
            return [fname, f]
    # old format for single eid
    # TODO: is this still used?
    gmfa = gmfa[gmfa['eid'] == eid]
    eid2rlz = dict(dstore['events'])
    rlzi = eid2rlz[eid]
    rlz = rlzs[rlzi]
    data, comment = _build_csv_data(gmfa, rlz, dstore['sitecol'], imts,
                                    oq.investigation_time)
    fname = dstore.build_fname('gmf', '%d-rlz-%03d' % (eid, rlzi), 'csv')
    return writers.write_csv(fname, data, comment=comment)
Exemple #58
0
def export_agg_losses(ekey, dstore):
    """
    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    losses_by_rlz = compactify(dstore[ekey[0]].value)
    etags = dstore['etags'].value
    writer = writers.CsvWriter(fmt='%10.6E')
    for rlz in dstore['rlzs_assoc'].realizations:
        column = losses_by_rlz[:, rlz.ordinal]
        dest = dstore.export_path('agg_losses-rlz%03d.csv' % rlz.ordinal)
        writer.save(compose_arrays(etags, column), dest)
    return writer.getsaved()
Exemple #59
0
def export_hazard_csv(key, dest, sitemesh, pmap, imtls, comment):
    """
    Export the curves of the given realization into CSV.

    :param key: output_type and export_type
    :param dest: path of the file to write
    :param sitemesh: site collection
    :param pmap: a ProbabilityMap
    :param dict imtls: intensity measure types and levels
    :param comment: header comment for the CSV file
    :returns: a list containing the exported path
    """
    array = calc.convert_to_array(pmap, len(sitemesh), imtls)
    writers.write_csv(dest, util.compose_arrays(sitemesh, array),
                      comment=comment)
    return [dest]
Exemple #60
0
def export_agg_losses(ekey, dstore):
    """
    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    losses = dstore[ekey[0]].value
    realizations = dstore['csm_info'].get_rlzs_assoc().realizations
    eids = calc.build_eids(dstore['events'], 0)
    writer = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    for rlz in realizations:
        column = losses[:, rlz.ordinal]
        dest = dstore.build_fname('agg_losses', rlz, 'csv')
        writer.save(compose_arrays(eids, column), dest)
    return writer.getsaved()