Пример #1
0
def export_gmf_csv(key, export_dir, fname, sitecol, ruptures, gmfs, rlz,
                   investigation_time):
    """
    Export ground motion fields as a CSV file where each line has the
    form tag,indices,gmvs_imt_1,...,gmvs_imt_N.

    :param key: output_type and export_type
    :param export_dir: the directory where to export
    :param fname: name of the exported file
    :param sitecol: the full site collection
    :param ruptures: an ordered list of ruptures
    :param gmfs: an ordered list of ground motion fields
    :param rlz: a realization object
    :param investigation_time: investigation time (None for scenario)
    """
    dest = os.path.join(export_dir, fname)
    imts = list(gmfs[0].dtype.fields)
    rows = []
    for rup, gmf in zip(ruptures, gmfs):
        # prefer the indices stored on the rupture; fall back to the
        # site collection indices, then to the full site range
        try:
            idxs = rup.indices
        except AttributeError:
            idxs = sitecol.indices
        if idxs is None:
            idxs = range(len(sitecol))
        rows.append([rup.tag, ' '.join(str(i) for i in idxs)]
                    + [gmf[imt] for imt in imts])
    save_csv(dest, rows)
    return {key: [dest]}
Пример #2
0
 def test(self):
     """Check that the exported average-loss CSV matches the expected file."""
     job = self._run_test()
     curves = models.LossCurveData.objects.filter(
         loss_curve__output__oq_job=job).order_by('asset_ref')
     data = [[c.asset_ref, c.average_loss] for c in curves]
     fd, fname = tempfile.mkstemp(suffix='.csv')
     os.close(fd)  # mkstemp opens the file; we only need its name
     writers.save_csv(
         fname, [['asset_ref', 'avg_loss']] + data, fmt='%11.8E')
     expected = self._test_path('expected/rlz-000-avg_loss.csv')
     self.assertEqual(open(fname).read(), open(expected).read())
Пример #3
0
 def test(self):
     """Export the per-asset average losses and compare with the expected CSV."""
     job = self._run_test()
     header = ['asset_ref', 'avg_loss']
     data = [[curve.asset_ref, curve.average_loss]
             for curve in models.LossCurveData.objects.filter(
                 loss_curve__output__oq_job=job).order_by('asset_ref')]
     fd, fname = tempfile.mkstemp(suffix='.csv')
     os.close(fd)  # the writer reopens the file by name
     writers.save_csv(fname, [header] + data, fmt='%11.8E')
     expected = self._test_path('expected/rlz-000-avg_loss.csv')
     self.assertEqual(open(fname).read(), open(expected).read())
Пример #4
0
def export_sitecol_csv(ekey, dstore):
    """
    Export the site collection as a CSV file, one row per site,
    ordered by site id.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    dest = dstore.export_path(*ekey)
    rows = [[s.id, s.location.x, s.location.y, s.vs30, s.vs30measured,
             s.z1pt0, s.z2pt5, s.backarc] for s in dstore['sitecol']]
    rows.sort(key=operator.itemgetter(0))  # order by site id
    save_csv(dest, rows)
    return [dest]
Пример #5
0
def export_loss_map_csv(key, output, target):
    """
    Export `output` to `target` in CSV format.

    :param key: export key, a pair (output type, file extension)
    :param output: a loss map output object
    :param target: destination directory
    :returns: the path of the exported file
    """
    dest = _get_result_export_dest(target, output, file_ext=key[1])
    data = []
    for row in models.order_by_location(
            output.loss_map.lossmapdata_set.all().order_by('asset_ref')):
        data.append(LossMapPerAsset(row.asset_ref, row.value))
    # read the header from the namedtuple class, not from data[0]:
    # this avoids an IndexError when the loss map has no data
    header = [LossMapPerAsset._fields]
    writers.save_csv(dest, header + data, fmt='%10.6E')
    return dest
Пример #6
0
def export_loss_map_csv(key, output, target):
    """
    Export `output` to `target` in CSV format.

    :param key: export key, a pair (output type, file extension)
    :param output: a loss map output object
    :param target: destination directory
    :returns: the path of the exported file
    """
    dest = _get_result_export_dest(target, output, file_ext=key[1])
    data = []
    for row in models.order_by_location(
            output.loss_map.lossmapdata_set.all().order_by('asset_ref')):
        data.append(scientific.LossMapPerAsset(row.asset_ref, row.value))
    # read the header from the namedtuple class, not from data[0]:
    # this avoids an IndexError when the loss map has no data
    header = [scientific.LossMapPerAsset._fields]
    writers.save_csv(dest, header + data, fmt='%10.6E')
    return dest
Пример #7
0
def export_avgloss_csv(key, output, target):
    """
    Export `output` to `target` in csv format for a given loss type
    """
    dest = _get_result_export_dest(target, output)[:-3] + 'csv'
    curves = output.loss_curve.losscurvedata_set.all().order_by('asset_ref')
    header = ['lon', 'lat', 'asset_ref', 'asset_value', 'average_loss',
              'stddev_loss', 'loss_type']
    rows = []
    for c in curves:
        # an undefined stddev is exported as an empty string
        rows.append((c.location.x, c.location.y, c.asset_ref, c.asset_value,
                     c.average_loss, c.stddev_loss or '',
                     c.loss_curve.loss_type))
    writers.save_csv(dest, [header] + rows)
    return dest
Пример #8
0
def export_loss_curve_csv(key, output, target):
    """
    Export `output` to `target` in CSV format.

    :param key: export key, a pair (output type, file extension)
    :param output: a loss curve output object
    :param target: destination directory
    :returns: the path of the exported file
    """
    dest = _get_result_export_dest(target, output)[:-3] + 'csv'
    data = []
    for row in output.loss_curve.losscurvedata_set.all().order_by('asset_ref'):
        lca = LossCurvePerAsset(
            row.asset_ref, row.losses, row.poes, row.average_loss)
        data.append(lca)
    # read the header from the namedtuple class rather than from the
    # loop variable `lca`, which is undefined when there are no rows
    header = [LossCurvePerAsset._fields]
    writers.save_csv(dest, header + data, fmt='%10.6E')
    return dest
Пример #9
0
def export_loss_curve_csv(key, output, target):
    """
    Export `output` to `target` in CSV format.

    :param key: export key, a pair (output type, file extension)
    :param output: a loss curve output object
    :param target: destination directory
    :returns: the path of the exported file
    """
    dest = _get_result_export_dest(target, output)[:-3] + 'csv'
    data = []
    for row in output.loss_curve.losscurvedata_set.all().order_by('asset_ref'):
        lca = scientific.LossCurvePerAsset(row.asset_ref, row.losses, row.poes,
                                           row.average_loss)
        data.append(lca)
    # read the header from the namedtuple class rather than from the
    # loop variable `lca`, which is undefined when there are no rows
    header = [scientific.LossCurvePerAsset._fields]
    writers.save_csv(dest, header + data, fmt='%10.6E')
    return dest
Пример #10
0
def export_event_loss_asset_csv(key, output, target):
    """
    Export Event Loss Per Asset in CSV format
    """
    dest = _get_result_export_dest(target, output)
    # rows ordered by rupture tag, then asset reference
    losses = models.EventLossAsset.objects.filter(
        event_loss__output=output).select_related().order_by(
            'rupture__tag', 'asset__asset_ref')
    rows = [[el.rupture.tag, el.asset.asset_ref, el.loss] for el in losses]
    writers.save_csv(dest, rows)
    return dest
Пример #11
0
def export_event_loss_asset_csv(key, output, target):
    """
    Export Event Loss Per Asset in CSV format
    """
    dest = _get_result_export_dest(target, output)
    rows = []
    queryset = models.EventLossAsset.objects.filter(
        event_loss__output=output).select_related()
    # order by rupture tag first, then by asset reference
    for rec in queryset.order_by('rupture__tag', 'asset__asset_ref'):
        rows.append([rec.rupture.tag, rec.asset.asset_ref, rec.loss])
    writers.save_csv(dest, rows)
    return dest
Пример #12
0
def export_avgloss_csv(key, output, target):
    """
    Export `output` to `target` in csv format for a given loss type
    """
    # replace the 3-letter extension of the destination with 'csv'
    dest = _get_result_export_dest(target, output)[:-3] + 'csv'
    data = output.loss_curve.losscurvedata_set.all().order_by('asset_ref')
    header = ['lon', 'lat', 'asset_ref', 'asset_value', 'average_loss',
              'stddev_loss', 'loss_type']
    rows = [(c.location.x, c.location.y, c.asset_ref, c.asset_value,
             c.average_loss,
             c.stddev_loss or '',  # empty string for missing stddev
             c.loss_curve.loss_type)
            for c in data]
    writers.save_csv(dest, [header] + rows)
    return dest
Пример #13
0
def export_uhs_csv(key, export_dir, fname, sitecol, hmaps):
    """
    Export the uniform hazard spectra as CSV, one row per site.

    :param key: output_type and export_type
    :param export_dir: the directory where to export
    :param fname: file name
    :param sitecol: site collection
    :param hmaps:
        an array N x I x P where N is the number of sites,
        I the number of IMTs of SA type, and P the number of poes
    """
    dest = os.path.join(export_dir, fname)
    rows = []
    for lon, lat, hmap in zip(sitecol.lons, sitecol.lats, hmaps):
        # the first cell of each row is the [lon, lat] pair
        rows.append([[lon, lat]] + list(hmap))
    save_csv(dest, rows)
    return {fname: dest}
Пример #14
0
def export_loss_csv(key, export_dir, data, suffix):
    """
    Export (aggregate) losses in CSV.

    :param key: per_asset_loss|asset-ins
    :param export_dir: the export directory
    :param data: a list [(loss_type, unit, asset_ref, mean, stddev), ...]
    :param suffix: a suffix specifying the GSIM realization
    """
    dest = os.path.join(export_dir, '%s%s.%s' % (key[0], suffix, key[1]))
    aggregate = key[0] in ('agg', 'ins')
    if aggregate:
        header = ['LossType', 'Unit', 'Mean', 'Standard Deviation']
    else:
        # per-asset export: add the Asset column and sort by asset_ref
        header = ['LossType', 'Unit', 'Asset', 'Mean', 'Standard Deviation']
        data.sort(key=operator.itemgetter(2))
    writers.save_csv(dest, [header] + data, fmt='%11.7E')
    return dest
Пример #15
0
def export_agg_loss_curve_csv(key, output, target):
    """
    Export `output` to `target` in csv format
    """
    dest = _get_result_export_dest(target, output)[:-3] + 'csv'
    curve = output.loss_curve.aggregatelosscurvedata
    # a single row describing the aggregate curve
    row = ('aggregate', curve.losses, curve.poes, curve.average_loss,
           curve.stddev_loss)
    return writers.save_csv(dest, [row], fmt='%10.6E')
Пример #16
0
def export_agg_loss_curve_csv(key, output, target):
    """
    Export `output` to `target` in csv format
    """
    # swap the original 3-letter extension for 'csv'
    dest = _get_result_export_dest(target, output)[:-3] + 'csv'
    agg = output.loss_curve.aggregatelosscurvedata
    record = (
        'aggregate', agg.losses, agg.poes, agg.average_loss, agg.stddev_loss)
    return writers.save_csv(dest, [record], fmt='%10.6E')
Пример #17
0
def export_gmf_csv(key, output, target):
    """
    Export the GMF Collection specified by ``output`` to the ``target``.

    :param output:
        :class:`openquake.engine.db.models.Output` with an `output_type` of
        `gmf`.
    :param target:
        The same ``target`` as :func:`export`.

    :returns:
        The same return value as defined by :func:`export`.
    """
    haz_calc = output.oq_job
    dest = _get_result_export_dest(haz_calc.id, target,
                                   output.gmf)[:-3] + 'csv'
    rows = sorted(_gen_gmf_rows(output),
                  key=operator.itemgetter(0))  # order the GMFs by tag
    save_csv(dest, rows)
    return dest
Пример #18
0
def export_gmf_csv(key, output, target):
    """
    Export the GMF Collection specified by ``output`` to the ``target``.

    :param output:
        :class:`openquake.engine.db.models.Output` with an `output_type` of
        `gmf`.
    :param target:
        The same ``target`` as :func:`export`.

    :returns:
        The same return value as defined by :func:`export`.
    """
    job = output.oq_job
    # replace the 3-letter extension of the destination with 'csv'
    dest = _get_result_export_dest(job.id, target, output.gmf)[:-3] + 'csv'
    bytag = operator.itemgetter(0)
    save_csv(dest, sorted(_gen_gmf_rows(output), key=bytag))
    return dest
Пример #19
0
 def test(self):
     """Compare the exported per-realization average losses with the expected CSVs."""
     job = self._run_test()
     outputs = models.Output.objects.filter(
         oq_job=job, output_type='loss_curve').order_by('id')
     for out in outputs:
         if out.display_name.startswith('Mean'):
             continue  # only per-realization curves are checked
         loss_curve = out.loss_curve
         rlz = loss_curve.hazard_output.hazard_curve.lt_realization
         key = 'rlz-%03d-avg_loss' % rlz.ordinal
         curves = models.LossCurveData.objects.filter(
             loss_curve=loss_curve).order_by('asset_ref')
         data = [[c.asset_ref, c.average_loss] for c in curves]
         fd, fname = tempfile.mkstemp(prefix=key, suffix='.csv')
         os.close(fd)  # the writer reopens the file by name
         writers.save_csv(fname, [['asset_ref', 'avg_loss']] + data,
                          fmt='%11.8E')
         expected = self._test_path('expected/%s.csv' % key)
         self.assertEqual(open(fname).read(), open(expected).read())
Пример #20
0
def export_stats_csv(key, export_dir, fname, sitecol, data_by_imt):
    """
    Export the scalar outputs.

    :param key: output_type and export_type
    :param export_dir: the directory where to export
    :param fname: file name
    :param sitecol: site collection
    :param data_by_imt: dictionary of floats keyed by IMT
    """
    dest = os.path.join(export_dir, fname)
    # one row per IMT: the IMT name followed by its formatted values
    rows = [[imt] + [scientificformat(v) for v in data_by_imt[imt]]
            for imt in sorted(data_by_imt)]
    # transpose so that the IMTs become the columns of the CSV
    save_csv(dest, numpy.array(rows).T)
    return {fname: dest}
Пример #21
0
 def test(self):
     """Export average losses for each non-mean realization and diff against expected."""
     job = self._run_test()
     header = ['asset_ref', 'avg_loss']
     for out in models.Output.objects.filter(
             oq_job=job, output_type='loss_curve').order_by('id'):
         if out.display_name.startswith('Mean'):
             continue  # skip the mean output
         loss_curve = out.loss_curve
         rlz = loss_curve.hazard_output.hazard_curve.lt_realization
         key = 'rlz-%03d-avg_loss' % rlz.ordinal
         data = [[curve.asset_ref, curve.average_loss]
                 for curve in models.LossCurveData.objects.filter(
                     loss_curve=loss_curve).order_by('asset_ref')]
         fd, fname = tempfile.mkstemp(prefix=key, suffix='.csv')
         os.close(fd)  # the writer reopens the file by name
         writers.save_csv(fname, [header] + data, fmt='%11.8E')
         expected = self._test_path('expected/%s.csv' % key)
         self.assertEqual(open(fname).read(), open(expected).read())
Пример #22
0
def export_hazard_curves_csv(key, export_dir, fname, sitecol, curves_by_imt,
                             imtls, investigation_time=None):
    """
    Export the hazard curves of the given realization into a CSV file.

    :param key: output_type and export_type
    :param export_dir: the directory where to export
    :param fname: name of the exported file
    :param sitecol: site collection
    :param curves_by_imt: dictionary with the curves keyed by IMT
    :param imtls: intensity measure types and levels (one column per IMT)
    :param investigation_time: investigation time (unused here)
    """
    dest = os.path.join(export_dir, fname)
    nsites = len(sitecol)
    # matrix of strings of shape nsites x (num_imts + 1); the extra
    # 0-th column holds the 'lon lat' of each site
    matrix = numpy.empty((nsites, len(imtls) + 1), dtype=object)
    for sid, lon, lat in zip(range(nsites), sitecol.lons, sitecol.lats):
        matrix[sid, 0] = '%s %s' % (lon, lat)
    for col, imt in enumerate(sorted(curves_by_imt.dtype.fields), 1):
        for sid, curve in zip(range(nsites), curves_by_imt[imt]):
            matrix[sid, col] = scientificformat(curve, fmt='%11.7E')
    save_csv(dest, matrix)
    return {fname: dest}
Пример #23
0
def export_ses_csv(key, output, target):
    """
    Export the Stochastic Event Set Collection specified by ``output`` to the
    ``target`` in csv format.

    :param output:
        :class:`openquake.engine.db.models.Output` with an `output_type` of
        `ses`.
    :param str target:
        Destination directory location for exported files.

    :returns:
        The exported file path
    """
    ses_coll = models.SESCollection.objects.get(output=output.id)
    haz_calc = output.oq_job
    dest = _get_result_export_dest(haz_calc.id, target,
                                   output.ses)[:-3] + 'csv'
    # one (tag, seed) row per rupture, over all stochastic event sets
    rows = [[sesrup.tag, sesrup.seed]
            for ses in ses_coll for sesrup in ses]
    rows.sort(key=operator.itemgetter(0))  # order by tag
    save_csv(dest, rows)
    return dest
Пример #24
0
def export_ses_csv(key, output, target):
    """
    Export the Stochastic Event Set Collection specified by ``output`` to the
    ``target`` in csv format.

    :param output:
        :class:`openquake.engine.db.models.Output` with an `output_type` of
        `ses`.
    :param str target:
        Destination directory location for exported files.

    :returns:
        The exported file path
    """
    ses_coll = models.SESCollection.objects.get(output=output.id)
    job = output.oq_job
    dest = _get_result_export_dest(
        job.id, target, output.ses)[:-3] + 'csv'
    records = []
    for ses in ses_coll:
        # each stochastic event set contributes one row per rupture
        records.extend([sesrup.tag, sesrup.seed] for sesrup in ses)
    save_csv(dest, sorted(records, key=operator.itemgetter(0)))
    return dest
Пример #25
0
def _export_ses_csv(dest, ses_coll):
    # collect one (tag, seed) row per rupture in every stochastic
    # event set, then export them ordered by tag
    rows = [[sesrup.tag, sesrup.seed]
            for ses in ses_coll for sesrup in ses]
    rows.sort(key=operator.itemgetter(0))
    save_csv(dest, rows)