Example #1
def __call__(self, dstore, key):
    if '/' in key:
        k, v = key.split('/', 1)
        data = self[k](dstore, v)
    elif '?' in key:
        k, v = key.split('?', 1)
        data = self[k](dstore, v)
    elif key in self:
        data = self[key](dstore, '')
    else:
        data = extract_(dstore, key)
    return ArrayWrapper.from_(data)
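
This `__call__` belongs to the dispatcher behind the /extract/ endpoints: a key such as `assets?taxonomy=RC` is split on `?` (or `/`) and routed to the function registered under the prefix, with the remainder passed along as the `what` argument. A minimal self-contained sketch of the same dispatch pattern (the registry and handler names here are illustrative, not the real OpenQuake ones):

class Registry(dict):
    """Toy stand-in for the extract dispatcher above."""
    def __call__(self, dstore, key):
        if '?' in key:
            k, v = key.split('?', 1)
            return self[k](dstore, v)
        return self[key](dstore, '')

registry = Registry()
registry['assets'] = lambda dstore, what: 'assets filtered by %r' % what
print(registry(None, 'assets?taxonomy=RC'))  # routed via the '?' branch
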
Example #2
def extract_aggregate(dstore, what):
    """
    /extract/aggregate/avg_losses?
    kind=mean&loss_type=structural&tag=taxonomy&tag=occupancy
    """
    name, qstring = what.split('?', 1)
    info = get_info(dstore)
    qdic = parse(qstring, info)
    suffix = '-rlzs' if qdic['rlzs'] else '-stats'
    tagnames = qdic.get('tag', [])
    assetcol = dstore['assetcol']
    ltypes = qdic.get('loss_type', [])
    if ltypes:
        array = dstore[name + suffix][:, qdic['k'][0], ltypes[0]]
    else:
        array = dstore[name + suffix][:, qdic['k'][0]]
    aw = ArrayWrapper(assetcol.aggregate_by(tagnames, array), {})
    for tagname in tagnames:
        setattr(aw, tagname, getattr(assetcol.tagcol, tagname))
    aw.tagnames = encode(tagnames)
    if not ltypes:
        aw.extra = ('loss_type',) + tuple(info['loss_types'])
    return aw
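
Most of the work happens in assetcol.aggregate_by, which sums the per-asset values over the requested tag columns. A pure-numpy sketch of that kind of grouped sum (the single-tag layout and the data are assumptions for illustration):

import numpy

# Hypothetical stand-in for aggregate_by with a single tag: sum per-asset
# losses into one bucket per tag index.
losses = numpy.array([10., 20., 30., 40.])   # one value per asset
taxonomy = numpy.array([1, 2, 1, 2])         # tag index per asset
agg = numpy.zeros(3)                         # index 0 is the "unknown" slot
numpy.add.at(agg, taxonomy, losses)          # unbuffered grouped sum
print(agg)  # [ 0. 40. 60.]
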
Example #3
def get(self, what):
    """
    :param what: what to extract
    :returns: an ArrayWrapper instance
    """
    url = '%s/v1/calc/%d/extract/%s' % (self.server, self.calc_id, what)
    logging.info('GET %s', url)
    resp = self.sess.get(url)
    if resp.status_code != 200:
        raise WebAPIError(resp.text)
    logging.info('Read %s of data', general.humansize(len(resp.content)))
    npz = numpy.load(io.BytesIO(resp.content))
    attrs = {k: npz[k] for k in npz if k != 'array'}
    try:
        arr = npz['array']
    except KeyError:
        arr = ()
    return ArrayWrapper(arr, attrs)
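
The client expects the server to reply with an .npz archive containing an optional 'array' member plus one member per attribute; everything except 'array' becomes an ArrayWrapper attribute. A round-trip sketch of that payload format (the layout is inferred from the decoding code above, not from a spec):

import io
import numpy

# Encode a payload the way the decoding logic above expects it ...
buf = io.BytesIO()
numpy.savez(buf, array=numpy.arange(3), units=numpy.array(['EUR']))

# ... and decode it exactly as the `get` method does.
npz = numpy.load(io.BytesIO(buf.getvalue()))
attrs = {k: npz[k] for k in npz if k != 'array'}
print(list(npz['array']), attrs['units'])  # [0, 1, 2] ['EUR']
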
Example #4
def extract_assets(dstore, what):
    """
    Extract an array of assets, optionally filtered by tag.
    Use it as /extract/assets?taxonomy=RC&taxonomy=MSBC&occupancy=RES
    """
    qdict = parse(what)
    dic = {}
    dic1, dic2 = dstore['assetcol/tagcol'].__toh5__()
    dic.update(dic1)
    dic.update(dic2)
    arr = dstore['assetcol/array'][()]
    for tag, vals in qdict.items():
        cond = numpy.zeros(len(arr), bool)
        for val in vals:
            tagidx, = numpy.where(dic[tag] == val)
            cond |= arr[tag] == tagidx
        arr = arr[cond]
    return ArrayWrapper(arr, dic)
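
The filtering loop works on tag indices, not strings: each tag column of the asset array stores an index into the corresponding table of tag values, so every requested string value is first translated into its index and then OR-ed into a boolean mask. A standalone sketch with made-up data:

import numpy

# Tag values table: column values are stored as indices into this array.
taxonomies = numpy.array([b'?', b'RC', b'MSBC'])
assets = numpy.array([(0, 1), (1, 2), (2, 1)],
                     dtype=[('ordinal', 'i4'), ('taxonomy', 'i4')])
cond = numpy.zeros(len(assets), bool)
for val in (b'RC',):                       # requested tag values
    tagidx, = numpy.where(taxonomies == val)
    cond |= assets['taxonomy'] == tagidx   # compare indices, not strings
print(assets[cond])  # the two assets with taxonomy == 'RC'
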
Example #5
def extract_disagg_layer(dstore, what):
    """
    Extract a disaggregation layer containing all sites and outputs
    Example:
    http://127.0.0.1:8800/v1/calc/30/extract/disagg_layer?
    """
    qdict = parse(what)
    oq = dstore['oqparam']
    if 'kind' in qdict:
        kinds = qdict['kind']
    else:
        kinds = oq.disagg_outputs
    sitecol = dstore['sitecol']
    poes_disagg = oq.poes_disagg or (None, )
    edges, shapedic = disagg.get_edges_shapedic(oq, sitecol,
                                                dstore['source_mags'])
    dt = _disagg_output_dt(shapedic, kinds, oq.imtls, poes_disagg)
    out = numpy.zeros(len(sitecol), dt)
    realizations = numpy.array(dstore['full_lt'].get_realizations())
    iml4 = dstore['iml4'][:]
    best_rlzs = dstore['best_rlzs'][:]
    arr = {kind: dstore['disagg/' + kind][:] for kind in kinds}
    for sid, lon, lat, rec in zip(sitecol.sids, sitecol.lons, sitecol.lats,
                                  out):
        rlzs = realizations[best_rlzs[sid]]
        rec['site_id'] = sid
        rec['lon'] = lon
        rec['lat'] = lat
        rec['lon_bins'] = edges[2][sid]
        rec['lat_bins'] = edges[3][sid]
        for m, imt in enumerate(oq.imtls):
            ws = numpy.array([rlz.weight[imt] for rlz in rlzs])
            ws /= ws.sum()  # normalize to 1
            for p, poe in enumerate(poes_disagg):
                for kind in kinds:
                    key = '%s-%s-%s' % (kind, imt, poe)
                    rec[key] = arr[kind][sid, m, p] @ ws
                rec['iml-%s-%s' % (imt, poe)] = iml4[sid, m, p]
    return ArrayWrapper(
        out,
        dict(mag=edges[0],
             dist=edges[1],
             eps=edges[-2],
             trt=numpy.array(encode(edges[-1]))))
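
The line `rec[key] = arr[kind][sid, m, p] @ ws` collapses the realization axis with a weighted mean, using the realization weights for the current IMT normalized to sum to 1. In isolation (the shapes are assumptions):

import numpy

# Per-bin probabilities for Z = 2 realizations, shape (bins, Z) ...
probs = numpy.array([[0.10, 0.20],
                     [0.30, 0.50]])
ws = numpy.array([3., 1.])   # raw realization weights
ws /= ws.sum()               # normalize to 1, as in the loop above
print(probs @ ws)            # weighted mean over Z: [0.125 0.35 ]
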
Example #6
def extract_exposure_metadata(dstore, what):
    """
    Extract the loss categories and the tags of the exposure.
    Use it as /extract/exposure_metadata
    """
    dic = {}
    dic1, dic2 = dstore['assetcol/tagcol'].__toh5__()
    dic.update(dic1)
    dic.update(dic2)
    if 'asset_risk' in dstore:
        dic['multi_risk'] = sorted(
            set(dstore['asset_risk'].dtype.names) -
            set(dstore['assetcol/array'].dtype.names))
    names = [
        name for name in dstore['assetcol/array'].dtype.names
        if name.startswith(('value-', 'number',
                            'occupants_')) and not name.endswith('_None')
    ]
    return ArrayWrapper(numpy.array(names), dic)
Example #7
def extract_disagg_layer(dstore, what):
    """
    Extract a disaggregation layer containing all sites and outputs
    Example:
    http://127.0.0.1:8800/v1/calc/30/extract/disagg_layer?
    """
    qdict = parse(what)
    oq = dstore['oqparam']
    if 'kind' in qdict:
        kinds = qdict['kind']
    else:
        kinds = list(oq.disagg_outputs or disagg.pmf_map)
    sitecol = dstore['sitecol']
    poes_disagg = oq.poes_disagg or (None, )
    edges, shapedic = disagg.get_edges_shapedic(oq, sitecol,
                                                dstore['source_mags'])
    dt = _disagg_output_dt(shapedic, kinds, oq.imtls, poes_disagg)
    out = numpy.zeros(len(sitecol), dt)
    try:
        best_rlzs = dstore['best_rlzs']
    except KeyError:
        best_rlzs = numpy.zeros((len(sitecol), shapedic['Z']), U16)
    for sid, lon, lat, rec in zip(sitecol.sids, sitecol.lons, sitecol.lats,
                                  out):
        rec['site_id'] = sid
        rec['lon'] = lon
        rec['lat'] = lat
        rec['rlz_id'] = rlzs = best_rlzs[sid]
        rec['lon_bins'] = edges[2][sid]
        rec['lat_bins'] = edges[3][sid]
        for kind in kinds:
            for imt in oq.imtls:
                for p, poe in enumerate(poes_disagg):
                    for rlz in rlzs:
                        key = '%s-%s-%s' % (kind, imt, poe)
                        label = 'disagg/rlz-%d-%s-sid-%d-poe-%s/%s' % (
                            rlz, imt, sid, p, kind)
                        rec[key] = dstore[label][()]
    return ArrayWrapper(
        out,
        dict(mag=edges[0], dist=edges[1], eps=edges[-2],
             trt=encode(edges[-1])))
Example #8
def extract_agg_curves(dstore, what):
    """
    Aggregate loss curves from the ebrisk calculator:

    /extract/agg_curves?
    kind=stats&absolute=1&loss_type=occupants&tagname=occupancy&tagvalue=RES

    Returns an array of shape (P, S, T...) or (P, R, T...)
    """
    info = get_info(dstore)
    qdic = parse(what, info)
    k = qdic['k']  # rlz or stat index
    [l] = qdic['loss_type']  # loss type index
    if qdic['rlzs']:
        kinds = ['rlz-%d' % r for r in k]
        arr = dstore['agg_curves-rlzs'][:, k, l]  # shape P, T...
        rps = dstore.get_attr('agg_curves-rlzs', 'return_periods')
    else:
        kinds = list(info['stats'])
        arr = dstore['agg_curves-stats'][:, k, l]  # shape P, T...
        rps = dstore.get_attr('agg_curves-stats', 'return_periods')
    tagnames = qdic.get('tagname', [])
    if set(tagnames) != set(info['tagnames']):
        raise ValueError('Expected tagnames=%s, got %s' %
                         (info['tagnames'], tagnames))
    tagvalues = qdic.get('tagvalue', [])
    if qdic['absolute'] == [1]:
        pass
    elif qdic['absolute'] == [0]:
        aggname = '_'.join(['agg'] + tagnames)
        evalue = dstore['exposed_values/' + aggname][l]  # shape T...
        arr /= evalue
    else:
        raise ValueError('"absolute" must be 0 or 1 in %s' % what)
    attrs = dict(shape_descr=['return_period', 'kind'] + tagnames)
    attrs['return_period'] = [numpy.nan] + list(rps)
    attrs['kind'] = ['?'] + kinds
    for tagname, tagvalue in zip(tagnames, tagvalues):
        attrs[tagname] = [tagvalue]
    return ArrayWrapper(arr, attrs)
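
With absolute=0 the curves are divided by the exposed value of the matching aggregation key, turning absolute losses into loss ratios; numpy broadcasting takes care of the trailing T... tag axes. For instance:

import numpy

# Hypothetical curves with shape (P, T) and exposed values with shape (T,);
# dividing broadcasts over the return-period axis P.
arr = numpy.array([[1e6, 2e6],
                   [3e6, 8e6]])
evalue = numpy.array([1e7, 2e7])
print(arr / evalue)  # loss ratios: [[0.1 0.1], [0.3 0.4]]
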
Example #9
def extract_rupture_info(dstore, what):
    """
    Extract some information about the ruptures, including the boundary.
    Example:
    http://127.0.0.1:8800/v1/calc/30/extract/rupture_info?min_mag=6
    """
    qdict = parse(what)
    if 'min_mag' in qdict:
        [min_mag] = qdict['min_mag']
    else:
        min_mag = 0
    oq = dstore['oqparam']

    dtlist = [('rup_id', U32), ('occurrence_rate', F32), ('multiplicity', U16),
              ('mag', F32), ('centroid_lon', F32), ('centroid_lat', F32),
              ('centroid_depth', F32), ('trt', '<S50'), ('strike', F32),
              ('dip', F32), ('rake', F32)]

    rows = []
    boundaries = []
    for rgetter in getters.gen_rgetters(dstore):
        proxies = rgetter.get_proxies(min_mag)
        rup_data = RuptureData(rgetter.trt, rgetter.samples,
                               rgetter.rlzs_by_gsim)
        for r in rup_data.to_array(proxies):
            coords = ['%.5f %.5f' % xyz[:2] for xyz in zip(*r['boundaries'])]
            coordset = sorted(set(coords))
            if len(coordset) < 4:  # degenerate to line
                boundaries.append('LINESTRING(%s)' % ', '.join(coordset))
            else:  # good polygon
                boundaries.append('POLYGON((%s))' % ', '.join(coords))

            rows.append((r['rup_id'], r['occurrence_rate'], r['multiplicity'],
                         r['mag'], r['lon'], r['lat'], r['depth'], rgetter.trt,
                         r['strike'], r['dip'], r['rake']))

    arr = numpy.array(rows, dtlist)
    geoms = gzip.compress('\n'.join(boundaries).encode('utf-8'))
    return ArrayWrapper(
        arr, dict(investigation_time=oq.investigation_time, boundaries=geoms))
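
The rupture boundaries travel as a single gzipped blob of newline-separated WKT strings, so a client recovers them by decompressing and splitting (the payload content below is made up):

import gzip

geoms = gzip.compress(
    b'POLYGON((0.0 0.0, 1.0 0.0, 1.0 1.0, 0.0 0.0))\n'
    b'LINESTRING(0.0 0.0, 1.0 1.0)')
for wkt in gzip.decompress(geoms).decode('utf-8').split('\n'):
    print(wkt)
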
Example #10
def extract_asset_risk(dstore, what):
    """
    Extract an array of assets + risk fields, optionally filtered by tag.
    Use it as /extract/asset_risk?taxonomy=RC&taxonomy=MSBC&occupancy=RES
    """
    qdict = parse(what)
    dic = {}
    dic1, dic2 = dstore['assetcol/tagcol'].__toh5__()
    dic.update(dic1)
    dic.update(dic2)
    arr = dstore['asset_risk'][()]
    names = list(arr.dtype.names)
    for i, name in enumerate(names):
        if name == 'id':
            names[i] = 'asset_id'  # for backward compatibility
    arr.dtype.names = names
    for tag, vals in qdict.items():
        cond = numpy.zeros(len(arr), bool)
        for val in vals:
            tagidx, = numpy.where(dic[tag] == val)
            cond |= arr[tag] == tagidx
        arr = arr[cond]
    return ArrayWrapper(arr, dic)
Example #11
def extract_disagg(dstore, what):
    """
    Extract a disaggregation output
    Example:
    http://127.0.0.1:8800/v1/calc/30/extract/
    disagg?kind=Mag_Dist&imt=PGA&poe_id=0&site_id=1&rlz=0
    """
    qdict = parse(what)
    label = qdict['kind'][0]
    imt = qdict['imt'][0]
    poe_idx = int(qdict['poe_id'][0])
    sid = int(qdict['site_id'][0])
    rlz = (int(qdict['rlz'][0])
           if 'rlz' in qdict else 0 if len(dstore['weights']) == 1 else None)
    dset = disagg_output(dstore, imt, sid, poe_idx, rlz)
    matrix = dset[label][()]

    # adapted from the nrml_converters
    disag_tup = tuple(label.split('_'))
    if disag_tup == ('Mag', 'Lon', 'Lat'):
        matrix = numpy.swapaxes(matrix, 0, 1)
        matrix = numpy.swapaxes(matrix, 1, 2)
        disag_tup = ('Lon', 'Lat', 'Mag')

    axis = [dset.attrs[v.lower() + '_bin_edges'] for v in disag_tup]
    # compute axis mid points
    axis = [(ax[:-1] + ax[1:]) / 2. if ax.dtype == float else ax
            for ax in axis]
    if len(axis) == 1:
        values = numpy.array([axis[0], matrix.flatten()]).T
    else:
        grids = numpy.meshgrid(*axis, indexing='ij')
        values = [g.flatten() for g in grids]
        values.append(matrix.flatten())
        values = numpy.array(values).T
    return ArrayWrapper(values, qdict)
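
The mid-point and meshgrid steps convert an N-dimensional probability matrix into flat rows of (coord_1, ..., coord_N, value), which is the layout plotting clients want. A 2x3 example:

import numpy

xedges = numpy.array([5., 6., 7.])          # 2 bins on the first axis
yedges = numpy.array([0., 10., 20., 30.])   # 3 bins on the second axis
matrix = numpy.arange(6.).reshape(2, 3)     # one value per bin
axis = [(ax[:-1] + ax[1:]) / 2. for ax in (xedges, yedges)]  # bin midpoints
grids = numpy.meshgrid(*axis, indexing='ij')
values = numpy.array([g.flatten() for g in grids] + [matrix.flatten()]).T
print(values)  # 6 rows of (x_mid, y_mid, probability)
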
Example #12
def extract_curves(dstore, what, tot):
    """
    Portfolio loss curves from the ebrisk calculator:

    /extract/tot_curves?
    kind=stats&absolute=1&loss_type=occupants

    Returns an array of shape (P, S) or (P, R)
    """
    info = get_info(dstore)
    qdic = parse(what, info)
    k = qdic['k']  # rlz or stat index
    [l] = qdic['loss_type']  # loss type index
    tup = (slice(None), k, l)
    if qdic['rlzs']:
        kinds = ['rlz-%d' % r for r in k]
        arr = dstore[tot + 'curves-rlzs'][tup]  # shape P, R
        units = dstore.get_attr(tot + 'curves-rlzs', 'units')
        rps = dstore.get_attr(tot + 'curves-rlzs', 'return_periods')
    else:
        kinds = list(info['stats'])
        arr = dstore[tot + 'curves-stats'][tup]  # shape P, S
        units = dstore.get_attr(tot + 'curves-stats', 'units')
        rps = dstore.get_attr(tot + 'curves-stats', 'return_periods')
    if qdic['absolute'] == [1]:
        pass
    elif qdic['absolute'] == [0]:
        evalue = dstore['exposed_values/agg'][l]
        arr /= evalue
    else:
        raise ValueError('"absolute" must be 0 or 1 in %s' % what)
    attrs = dict(shape_descr=['return_period', 'kind'])
    attrs['return_period'] = list(rps)
    attrs['kind'] = kinds
    attrs['units'] = units  # used by the QGIS plugin
    return ArrayWrapper(arr, attrs)
Example #13
def extract_gsims_by_trt(dstore, what):
    """
    Extract the dictionary gsims_by_trt
    """
    return ArrayWrapper((), dstore['full_lt'].gsim_lt.values)
Example #14
def extract_oqparam(dstore, dummy):
    """
    Extract job parameters as a JSON npz. Use it as /extract/oqparam
    """
    return ArrayWrapper((), {'json': json.dumps(vars(dstore['oqparam']))})
Example #15
def disaggregation(sources,
                   site,
                   imt,
                   iml,
                   gsim_by_trt,
                   truncation_level,
                   n_epsilons,
                   mag_bin_width,
                   dist_bin_width,
                   coord_bin_width,
                   source_filter=filters.nofilter,
                   filter_distance='rjb'):
    """
    Compute "Disaggregation" matrix representing conditional probability of an
    intensity mesaure type ``imt`` exceeding, at least once, an intensity
    measure level ``iml`` at a geographical location ``site``, given rupture
    scenarios classified in terms of:

    - rupture magnitude
    - Joyner-Boore distance from rupture surface to site
    - longitude and latitude of the surface projection of a rupture's point
      closest to ``site``
    - epsilon: number of standard deviations by which an intensity measure
      level deviates from the median value predicted by a GSIM, given the
      rupture parameters
    - rupture tectonic region type

    In other words, the disaggregation matrix makes it possible to compute
    the probability that a scenario with the specified properties (e.g.,
    magnitude, or magnitude and distance) causes one or more exceedances
    of a given hazard level.

    For more detailed information about the disaggregation, see for instance
    "Disaggregation of Seismic Hazard", Paolo Bazzurro, C. Allin Cornell,
    Bulletin of the Seismological Society of America, Vol. 89, pp. 501-520,
    April 1999.

    :param sources:
        Seismic source model; as for the
        :mod:`PSHA <openquake.hazardlib.calc.hazard_curve>` calculator,
        it should be an iterator of seismic sources.
    :param site:
        :class:`~openquake.hazardlib.site.Site` of interest to calculate
        disaggregation matrix for.
    :param imt:
        Instance of :mod:`intensity measure type <openquake.hazardlib.imt>`
        class.
    :param iml:
        Intensity measure level. A float value in units of ``imt``.
    :param gsim_by_trt:
        Tectonic region type to GSIM objects mapping.
    :param truncation_level:
        Float, number of standard deviations for truncation of the intensity
        distribution.
    :param n_epsilons:
        Integer number of epsilon histogram bins in the result matrix.
    :param mag_bin_width:
        Magnitude discretization step, width of one magnitude histogram bin.
    :param dist_bin_width:
        Distance histogram discretization step, in km.
    :param coord_bin_width:
        Longitude and latitude histograms discretization step,
        in decimal degrees.
    :param source_filter:
        Optional source-site filter function. See
        :mod:`openquake.hazardlib.calc.filters`.
    :param filter_distance:
        Name of the distance measure used for filtering (``'rjb'``, the
        Joyner-Boore distance, by default).

    :returns:
        A tuple of two items. First is itself a tuple of bin edges information
        for (in specified order) magnitude, distance, longitude, latitude,
        epsilon and tectonic region types.

        Second item is 6d-array representing the full disaggregation matrix.
        Dimensions are in the same order as bin edges in the first item
        of the result tuple. The matrix can be used directly by pmf-extractor
        functions.
    """
    trts = sorted(set(src.tectonic_region_type for src in sources))
    trt_num = dict((trt, i) for i, trt in enumerate(trts))
    rlzs_by_gsim = {gsim_by_trt[trt]: [0] for trt in trts}
    by_trt = groupby(sources, operator.attrgetter('tectonic_region_type'))
    bdata = {}
    sitecol = SiteCollection([site])
    iml2 = ArrayWrapper(numpy.array([[iml]]),
                        dict(imts=[imt], poes_disagg=[None], rlzi=0))
    for trt, srcs in by_trt.items():
        cmaker = ContextMaker(trt, rlzs_by_gsim,
                              source_filter.integration_distance,
                              {'filter_distance': filter_distance})
        contexts.RuptureContext.temporal_occurrence_model = (
            srcs[0].temporal_occurrence_model)
        rupdata = contexts.RupData(cmaker, sitecol).from_srcs(srcs)
        bdata[trt] = collect_bin_data(rupdata, sitecol, cmaker, iml2,
                                      truncation_level, n_epsilons)
    if sum(len(bd.mags) for bd in bdata.values()) == 0:
        warnings.warn(
            'No ruptures have contributed to the hazard at site %s' % site,
            RuntimeWarning)
        return None, None

    min_mag = min(bd.mags.min() for bd in bdata.values())
    max_mag = max(bd.mags.max() for bd in bdata.values())
    mag_bins = mag_bin_width * numpy.arange(
        int(numpy.floor(min_mag / mag_bin_width)),
        int(numpy.ceil(max_mag / mag_bin_width) + 1))

    min_dist = min(bd.dists.min() for bd in bdata.values())
    max_dist = max(bd.dists.max() for bd in bdata.values())
    dist_bins = dist_bin_width * numpy.arange(
        int(numpy.floor(min_dist / dist_bin_width)),
        int(numpy.ceil(max_dist / dist_bin_width) + 1))

    bb = (min(bd.lons.min() for bd in bdata.values()),
          min(bd.lats.min() for bd in bdata.values()),
          max(bd.lons.max() for bd in bdata.values()),
          max(bd.lats.max() for bd in bdata.values()))
    lon_bins, lat_bins = lon_lat_bins(bb, coord_bin_width)

    eps_bins = numpy.linspace(-truncation_level, truncation_level,
                              n_epsilons + 1)

    bin_edges = (mag_bins, dist_bins, lon_bins, lat_bins, eps_bins)
    matrix = numpy.zeros(
        (len(mag_bins) - 1, len(dist_bins) - 1, len(lon_bins) - 1,
         len(lat_bins) - 1, len(eps_bins) - 1, len(trts)))
    for trt in bdata:
        dic = build_disagg_matrix(bdata[trt], bin_edges)
        if dic:  # (poe, imt, rlzi) -> matrix
            [mat] = dic.values()
            matrix[..., trt_num[trt]] = mat
    return bin_edges + (trts, ), matrix
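
The bin edges are built as integer multiples of the bin width bracketing the observed range, so edges line up across calculations that use the same width. Isolating the magnitude-bin construction (the observed range is made up):

import numpy

mag_bin_width = 0.5
min_mag, max_mag = 5.3, 6.9   # hypothetical observed magnitude range
mag_bins = mag_bin_width * numpy.arange(
    int(numpy.floor(min_mag / mag_bin_width)),
    int(numpy.ceil(max_mag / mag_bin_width) + 1))
print(mag_bins)  # [5.  5.5 6.  6.5 7. ] -- edges bracket [5.3, 6.9]
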
Example #16
def get(self, what):
    """
    :param what: what to extract
    :returns: an ArrayWrapper instance
    """
    return ArrayWrapper.from_(extract(self.dstore, what))
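
This is the local counterpart of the web client in Example #3: it calls the extract dispatcher on an open datastore instead of going through HTTP. A hedged usage sketch, assuming the usual OpenQuake Extractor wrapper (the class name, constructor, and close method are assumptions based on convention, not shown in the snippet):

# Hypothetical usage; assumes an Extractor class exposing this `get`
# method over a datastore opened from a calculation id.
from openquake.calculators.extract import Extractor

xtr = Extractor(30)                 # placeholder calculation id
aw = xtr.get('exposure_metadata')   # same keys as the web API
print(aw.array)
xtr.close()                         # assumed cleanup method
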