Example #1
def get_composite_source_model(oqparam, full_lt=None, h5=None):
    """
    Parse the XML and build a complete composite source model in memory.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param full_lt:
        a :class:`openquake.commonlib.logictree.FullLogicTree` or None
    :param h5:
        an open hdf5.File in which to store the source info
    """
    if full_lt is None:
        full_lt = get_full_lt(oqparam)
    csm = get_csm(oqparam, full_lt, h5)
    grp_ids = csm.get_grp_ids()
    gidx = {tuple(arr): i for i, arr in enumerate(grp_ids)}
    if oqparam.is_event_based():
        csm.init_serials(oqparam.ses_seed)
    data = {}  # src_id -> row
    mags = AccumDict(accum=set())  # trt -> mags
    wkts = []
    ns = 0
    for sg in csm.src_groups:
        if hasattr(sg, 'mags'):  # UCERF
            mags[sg.trt].update('%.2f' % mag for mag in sg.mags)
        for src in sg:
            ns += 1
            if src.source_id in data:
                num_sources = data[src.source_id][3] + 1
            else:
                num_sources = 1
            row = [
                src.source_id, gidx[tuple(src.grp_ids)], src.code, num_sources,
                0, 0, 0, src.checksum, src.serial
            ]
            wkts.append(src._wkt)  # this is a bit slow but okay
            data[src.source_id] = row
            if hasattr(src, 'mags'):  # UCERF
                continue  # already accounted for in sg.mags
            elif hasattr(src, 'data'):  # nonparametric
                srcmags = ['%.2f' % item[0].mag for item in src.data]
            else:
                srcmags = [
                    '%.2f' % item[0]
                    for item in src.get_annual_occurrence_rates()
                ]
            mags[sg.trt].update(srcmags)

    logging.info('There are %d sources with %d unique IDs', ns, len(data))
    if h5:
        hdf5.create(h5, 'source_info', source_info_dt)  # avoid hdf5 damned bug
        h5['source_wkt'] = numpy.array(wkts, hdf5.vstr)
        for trt in mags:
            h5['source_mags/' + trt] = numpy.array(sorted(mags[trt]))
        h5['grp_ids'] = grp_ids
    csm.gsim_lt.check_imts(oqparam.imtls)
    csm.source_info = data
    if os.environ.get('OQ_CHECK_INPUT'):
        source.check_complex_faults(csm.get_sources())
    return csm
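
A minimal usage sketch for the function above, assuming the module layout of recent OpenQuake engine releases (readinput and baselib.hdf5); the job.ini and output file names are placeholders:

# hedged sketch: paths and module locations are illustrative
from openquake.baselib import hdf5
from openquake.commonlib import readinput

oqparam = readinput.get_oqparam('job.ini')        # parse the job configuration
with hdf5.File('calc_1.hdf5', 'w') as h5:         # optional file for source_info
    csm = readinput.get_composite_source_model(oqparam, h5=h5)
    print('%d source groups' % len(csm.src_groups))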
Example #2
def get_composite_source_model(oqparam, h5=None):
    """
    Parse the XML and build a complete composite source model in memory.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param h5:
        an open hdf5.File in which to store the source info
    """
    full_lt = get_full_lt(oqparam)
    if oqparam.csm_cache and not oqparam.is_ucerf():
        csm = _get_csm_cached(oqparam, full_lt, h5)
    else:
        csm = get_csm(oqparam, full_lt, h5)
    grp_ids = csm.get_grp_ids()
    gidx = {tuple(arr): i for i, arr in enumerate(grp_ids)}
    if oqparam.is_event_based():
        csm.init_serials(oqparam.ses_seed)
    data = {}  # src_id -> row
    mags = AccumDict(accum=set())  # trt -> mags
    wkts = []
    ns = -1
    for sg in csm.src_groups:
        if hasattr(sg, 'mags'):  # UCERF
            mags[sg.trt].update('%.2f' % mag for mag in sg.mags)
        for src in sg:
            if src.source_id in data:
                multiplicity = data[src.source_id][MULTIPLICITY] + 1
            else:
                multiplicity = 1
                ns += 1
            src.gidx = gidx[tuple(src.grp_ids)]
            row = [src.source_id, src.gidx, src.code,
                   multiplicity, 0, 0, 0, src.checksum, src.serial or ns,
                   full_lt.trti[src.tectonic_region_type]]
            wkts.append(src._wkt)  # this is a bit slow but okay
            data[src.source_id] = row
            if hasattr(src, 'mags'):  # UCERF
                continue  # already accounted for in sg.mags
            elif hasattr(src, 'data'):  # nonparametric
                srcmags = ['%.2f' % item[0].mag for item in src.data]
            else:
                srcmags = ['%.2f' % item[0] for item in
                           src.get_annual_occurrence_rates()]
            mags[sg.trt].update(srcmags)
    logging.info('There are %d sources', ns + 1)
    if h5:
        attrs = dict(atomic=any(grp.atomic for grp in csm.src_groups))
        # avoid hdf5 damned bug by creating source_info in advance
        hdf5.create(h5, 'source_info', source_info_dt, attrs=attrs)
        h5['source_wkt'] = numpy.array(wkts, hdf5.vstr)
        for trt in mags:
            h5['source_mags/' + trt] = numpy.array(sorted(mags[trt]))
        h5['grp_ids'] = grp_ids
    csm.gsim_lt.check_imts(oqparam.imtls)
    csm.source_info = data  # src_id -> row
    if os.environ.get('OQ_CHECK_INPUT'):
        source.check_complex_faults(csm.get_sources())
    return csm
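
The multiplicity bookkeeping in the loop above (how many groups a source ID appears in) can be reproduced in isolation; a standalone sketch with plain strings instead of real source objects:

from collections import Counter

# mimic the MULTIPLICITY column: count occurrences of each source ID
# across all source groups
src_groups = [['A', 'B'], ['A', 'C'], ['A']]
multiplicity = Counter(src_id for group in src_groups for src_id in group)
assert multiplicity['A'] == 3 and multiplicity['B'] == 1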
Example #3
def get_composite_source_model(oqparam, h5=None):
    """
    Parse the XML and build a complete composite source model in memory.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param h5:
        an open hdf5.File in which to store the source info
    """
    logging.info('Reading the CompositeSourceModel')
    full_lt = get_full_lt(oqparam)
    if oqparam.cachedir and not oqparam.is_ucerf():
        csm = _get_cachedir(oqparam, full_lt, h5)
    else:
        csm = get_csm(oqparam, full_lt, h5)
    et_ids = csm.get_et_ids()
    logging.info('%d effective smlt realization(s)', len(full_lt.sm_rlzs))
    data = {}  # src_id -> row
    mags_by_trt = csm.get_mags_by_trt()
    wkts = []
    lens = []
    for sg in csm.src_groups:
        for src in sg:
            lens.append(len(src.et_ids))
            row = [
                src.source_id, src.grp_id, src.code, 0, 0, 0,
                full_lt.trti[src.tectonic_region_type], 0
            ]
            wkts.append(src._wkt)
            data[src.id] = row
    logging.info('There are %d groups and %d sources with len(et_ids)=%.2f',
                 len(csm.src_groups), sum(len(sg) for sg in csm.src_groups),
                 numpy.mean(lens))
    if h5:
        attrs = dict(atomic=any(grp.atomic for grp in csm.src_groups))
        # avoid hdf5 damned bug by creating source_info in advance
        hdf5.create(h5, 'source_info', source_info_dt, attrs=attrs)
        h5['source_wkt'] = numpy.array(wkts, hdf5.vstr)
        h5['et_ids'] = et_ids
        for trt in mags_by_trt:
            h5['source_mags/' + trt] = numpy.array(mags_by_trt[trt])
        oqparam.maximum_distance.interp(mags_by_trt)
    csm.gsim_lt.check_imts(oqparam.imtls)
    csm.source_info = data  # src_id -> row
    if os.environ.get('OQ_CHECK_INPUT'):
        source.check_complex_faults(csm.get_sources())
    return csm
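
The h5 writes above go through openquake.baselib.hdf5 helpers (hdf5.create, hdf5.vstr); with plain h5py the same idea, variable-length WKT strings plus one magnitude array per tectonic region type, looks roughly like this sketch (not the engine's own API):

import numpy
import h5py

wkts = ['POINT(0 0)', 'POINT(1 1)']                      # illustrative geometries
mags_by_trt = {'Active Shallow Crust': numpy.array([5.0, 5.5, 6.0])}

with h5py.File('sources.hdf5', 'w') as h5:
    # variable-length strings need an explicit string dtype
    h5.create_dataset('source_wkt',
                      data=numpy.array(wkts, dtype=h5py.string_dtype()))
    for trt, mags in mags_by_trt.items():
        h5['source_mags/' + trt] = mags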
Example #4
def _check_csm(csm, oqparam, h5):
    # checks
    csm.gsim_lt.check_imts(oqparam.imtls)

    srcs = csm.get_sources()
    if not srcs:
        raise RuntimeError('All sources were discarded!?')

    if os.environ.get('OQ_CHECK_INPUT'):
        source.check_complex_faults(srcs)

    # build a smart SourceFilter
    try:
        sitecol = get_site_collection(oqparam, h5)  # already stored
    except Exception:  # missing sites.csv in test_case_1_ruptures
        sitecol = None
    csm.sitecol = sitecol
    if sitecol is None:
        return
    srcfilter = SourceFilter(sitecol, oqparam.maximum_distance)
    logging.info('Checking the sources bounding box')
    lons = []
    lats = []
    for src in srcs:
        try:
            box = srcfilter.get_enlarged_box(src)
        except BBoxError as exc:
            logging.error(exc)
            continue
        lons.append(box[0])
        lats.append(box[1])
        lons.append(box[2])
        lats.append(box[3])
    if cross_idl(*(list(sitecol.lons) + lons)):
        lons = numpy.array(lons) % 360
    else:
        lons = numpy.array(lons)
    bbox = (lons.min(), min(lats), lons.max(), max(lats))
    if bbox[2] - bbox[0] > 180:
        raise BBoxError(
            'The bounding box of the sources is larger than half '
            'the globe: %d degrees' % (bbox[2] - bbox[0]))
    sids = sitecol.within_bbox(bbox)
    if len(sids) == 0:
        raise RuntimeError('All sources were discarded!?')
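
The date-line handling in _check_csm (cross_idl plus the % 360 remapping of the longitudes) can be shown on its own; a simplified sketch with made-up coordinates, not the engine's cross_idl implementation:

import numpy

def bbox_across_idl(lons, lats):
    """Return (min_lon, min_lat, max_lon, max_lat), remapping longitudes
    to [0, 360) when the points straddle the international date line."""
    lons = numpy.array(lons, dtype=float)
    if lons.max() - lons.min() > 180:  # crude date-line test
        lons = lons % 360
    return float(lons.min()), min(lats), float(lons.max()), max(lats)

# points on both sides of the date line: without the remapping the
# bounding box would wrongly span almost 360 degrees of longitude
print(bbox_across_idl([179.5, -179.5], [10, 12]))  # lon span is 1 degree, not 359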
Example #5
def get_composite_source_model(oqparam, h5=None):
    """
    Parse the XML and build a complete composite source model in memory.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param h5:
        an open hdf5.File in which to store the source info
    """
    full_lt = get_full_lt(oqparam)
    if oqparam.cachedir and not oqparam.is_ucerf():
        csm = _get_cachedir(oqparam, full_lt, h5)
    else:
        csm = get_csm(oqparam, full_lt, h5)
    et_ids = csm.get_et_ids()
    logging.info('%d effective smlt realization(s)', len(full_lt.sm_rlzs))
    grp_id = {tuple(arr): i for i, arr in enumerate(et_ids)}
    data = {}  # src_id -> row
    mags = AccumDict(accum=set())  # trt -> mags
    wkts = []
    lens = []
    for sg in csm.src_groups:
        if hasattr(sg, 'mags'):  # UCERF
            mags[sg.trt].update('%.2f' % mag for mag in sg.mags)
        for src in sg:
            lens.append(len(src.et_ids))
            src.grp_id = grp_id[tuple(src.et_ids)]
            row = [
                src.source_id, src.grp_id, src.code, 0, 0, 0, src.id,
                full_lt.trti[src.tectonic_region_type]
            ]
            wkts.append(src._wkt)  # this is a bit slow but okay
            data[src.source_id] = row
            if hasattr(src, 'mags'):  # UCERF
                continue  # already accounted for in sg.mags
            elif hasattr(src, 'data'):  # nonparametric
                srcmags = ['%.2f' % item[0].mag for item in src.data]
            else:
                srcmags = [
                    '%.2f' % item[0]
                    for item in src.get_annual_occurrence_rates()
                ]
            mags[sg.trt].update(srcmags)
    logging.info('There are %d groups and %d sources with len(et_ids)=%.1f',
                 len(csm.src_groups), sum(len(sg) for sg in csm.src_groups),
                 numpy.mean(lens))
    if h5:
        attrs = dict(atomic=any(grp.atomic for grp in csm.src_groups))
        # avoid hdf5 damned bug by creating source_info in advance
        hdf5.create(h5, 'source_info', source_info_dt, attrs=attrs)
        h5['source_wkt'] = numpy.array(wkts, hdf5.vstr)
        h5['et_ids'] = et_ids
        mags_by_trt = {}
        for trt in mags:
            mags_by_trt[trt] = arr = numpy.array(sorted(mags[trt]))
            h5['source_mags/' + trt] = arr
        oqparam.maximum_distance.interp(mags_by_trt)
    csm.gsim_lt.check_imts(oqparam.imtls)
    csm.source_info = data  # src_id -> row
    if os.environ.get('OQ_CHECK_INPUT'):
        source.check_complex_faults(csm.get_sources())
    return csm
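
The mags accumulator above uses openquake's AccumDict(accum=set()) helper; the standard-library equivalent is a defaultdict of sets, shown here with made-up rates:

from collections import defaultdict

# one set of magnitude strings per tectonic region type, as in the loop above
mags = defaultdict(set)
rates = {'Active Shallow Crust': [5.0, 5.05, 6.2], 'Stable Shallow Crust': [4.5]}
for trt, mag_list in rates.items():
    mags[trt].update('%.2f' % mag for mag in mag_list)
print({trt: sorted(values) for trt, values in mags.items()})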