# Example 1
def export_agg_losses_ebr(ekey, dstore):
    """
    Export the aggregate event-based-risk losses, one CSV file per
    realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of file names produced by the CSV writer
    """
    loss_types = dstore.get_attr('composite_risk_model', 'loss_types')
    L = len(loss_types)  # number of loss types
    name, ext = export.keyfunc(ekey)
    agg_losses = dstore[name]
    has_rup_data = 'ruptures' in dstore
    # rupture columns are exported only when rupture data is available
    extra_list = [('magnitude', F32), ('centroid_lon', F32),
                  ('centroid_lat', F32),
                  ('centroid_depth', F32)] if has_rup_data else []
    oq = dstore['oqparam']
    dtlist = ([('event_id', U64), ('rup_id', U32),
               ('year', U32)] + extra_list + oq.loss_dt_list())
    elt_dt = numpy.dtype(dtlist)
    csm_info = dstore['csm_info']
    rlzs_assoc = csm_info.get_rlzs_assoc()
    writer = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    for sm_id, rlzs in rlzs_assoc.rlzs_by_smodel.items():
        # populate rup_data and event_by_eid
        rup_data = {}
        event_by_grp = {}  # grp_id -> eid -> event
        for grp_id in csm_info.get_grp_ids(sm_id):
            # NOTE: event_by_eid aliases the dict just stored in event_by_grp,
            # so filling it below also fills event_by_grp[grp_id]
            event_by_grp[grp_id] = event_by_eid = {}
            try:
                events = dstore['events/grp-%02d' % grp_id]
            except KeyError:
                continue  # no events were stored for this group
            for event in events:
                event_by_eid[event['eid']] = event
            if has_rup_data:
                rup_data.update(get_rup_data(calc.get_ruptures(dstore,
                                                               grp_id)))

        for rlz in rlzs:
            rlzname = 'rlz-%03d' % rlz.ordinal
            if rlzname not in agg_losses:
                continue  # nothing was saved for this realization
            data = agg_losses[rlzname].value
            eids = data['eid']
            losses = data['loss']
            eids_, years, serials = get_eids_years_serials(event_by_grp, eids)
            elt = numpy.zeros(len(eids), elt_dt)
            elt['event_id'] = eids_
            elt['year'] = years
            if rup_data:
                copy_to(elt, rup_data, serials)
            # with insured losses the loss array has 2 * L columns:
            # columns [0, L) are ground losses, [L, 2L) are insured losses
            for i, ins in enumerate(
                ['', '_ins'] if oq.insured_losses else ['']):
                for l, loss_type in enumerate(loss_types):
                    elt[loss_type + ins][:] = losses[:, l + L * i]
            elt.sort(order=['year', 'event_id'])
            dest = dstore.build_fname('agg_losses', rlz, 'csv')
            writer.save(elt, dest)
    return writer.getsaved()
# Example 2
def export_agg_losses_ebr(ekey, dstore):
    """
    Export the aggregate event-based-risk losses, one CSV file per
    realization, tagged with the event tag and the year.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of file names produced by the CSV writer
    """
    loss_types = dstore.get_attr('composite_risk_model', 'loss_types')
    name, ext = export.keyfunc(ekey)
    agg_losses = dstore[name]
    oq = dstore['oqparam']
    dtlist = [('event_tag', (numpy.string_, 100)), ('year', U32)
              ] + oq.loss_dt_list()
    elt_dt = numpy.dtype(dtlist)
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    sm_ids = sorted(rlzs_assoc.rlzs_by_smodel)
    writer = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    for sm_id in sm_ids:
        rlzs = rlzs_assoc.rlzs_by_smodel[sm_id]
        try:
            events = dstore['events/sm-%04d' % sm_id]
        except KeyError:
            continue  # no events stored for this source model
        if not len(events):
            continue
        for rlz in rlzs:
            dest = dstore.build_fname('agg_losses', rlz, 'csv')
            # collect the event IDs appearing in any loss type dataset
            eids = set()
            rlzname = 'rlz-%03d' % rlz.ordinal
            if rlzname not in agg_losses:
                continue  # nothing was saved for this realization
            for loss_type in agg_losses[rlzname]:
                dset = agg_losses['%s/%s' % (rlzname, loss_type)]
                eids.update(dset['eid'])
            eids = sorted(eids)
            rlz_events = events[eids]
            # map each event ID to its row index in the output table
            eid2idx = dict(zip(eids, range(len(eids))))
            elt = numpy.zeros(len(eids), elt_dt)
            elt['event_tag'] = build_etags(rlz_events)
            elt['year'] = rlz_events['year']
            for loss_type in loss_types:
                elt_lt = elt[loss_type]
                if oq.insured_losses:
                    elt_lt_ins = elt[loss_type + '_ins']
                key = 'rlz-%03d/%s' % (rlz.ordinal, loss_type)
                if key not in agg_losses:  # nothing was saved for this key
                    continue
                data = agg_losses[key].value
                # i is the (tuple) index yielded by ndenumerate
                for i, eid in numpy.ndenumerate(data['eid']):
                    idx = eid2idx[eid]
                    if oq.insured_losses:
                        # column 0 holds the ground loss, column 1 the
                        # insured loss
                        elt_lt[idx] = data['loss'][i, 0]
                        elt_lt_ins[idx] = data['loss'][i, 1]
                    else:
                        elt_lt[idx] = data['loss'][i]
            elt.sort(order='event_tag')
            writer.save(elt, dest)
    return writer.getsaved()
# Example 3
def export_agg_losses_ebr(ekey, dstore):
    """
    Export the event loss table for an event-based-risk calculation as a
    single CSV file covering all realizations.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of file names written (empty if there are no ruptures)
    """
    if 'ruptures' not in dstore:
        logging.warning('There are no ruptures in the datastore')
        return []
    agg_losses = dstore['losses_by_event']
    oq = dstore['oqparam']
    lti = oq.lti  # loss type -> column index in the loss array
    # the early return above guarantees that rupture data is present,
    # so the rupture columns are always part of the exported record
    dtlist = ([('event_id', U64), ('rup_id', U32), ('year', U32),
               ('rlzi', U16), ('magnitude', F32), ('centroid_lon', F32),
               ('centroid_lat', F32), ('centroid_depth', F32)]
              + oq.loss_dt_list())
    elt_dt = numpy.dtype(dtlist)
    elt = numpy.zeros(len(agg_losses), elt_dt)
    writer = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    events = dstore['events'].value
    events_by_rupid = collections.defaultdict(list)
    for event in events:
        # the rupture serial lives in the upper 32 bits of the event ID
        rupid = event['eid'] // TWO32
        events_by_rupid[rupid].append(event)
    year_of = year_dict(events['eid'], oq.investigation_time, oq.ses_seed)
    rup_data = {}
    event_by_eid = {}  # eid -> event
    # populate rup_data and event_by_eid
    # TODO: avoid reading the events twice
    for rgetter in getters.gen_rupture_getters(dstore):
        ruptures = rgetter.get_ruptures()
        for ebr in ruptures:
            for event in events_by_rupid[ebr.serial]:
                event_by_eid[event['eid']] = event
        rup_data.update(get_rup_data(ruptures))
    for r, row in enumerate(agg_losses):
        rec = elt[r]
        event = event_by_eid[row['eid']]
        rec['event_id'] = eid = event['eid']
        rec['year'] = year_of[eid]
        rec['rlzi'] = row['rlzi']
        if rup_data:
            rec['rup_id'] = rup_id = event['eid'] // TWO32
            (rec['magnitude'], rec['centroid_lon'], rec['centroid_lat'],
             rec['centroid_depth']) = rup_data[rup_id]
        for lt, i in lti.items():
            rec[lt] = row['loss'][i]
    elt.sort(order=['year', 'event_id', 'rlzi'])
    dest = dstore.build_fname('agg_losses', 'all', 'csv')
    writer.save(elt, dest)
    return writer.getsaved()
# Example 4
def export_all_loss_ratios(ekey, dstore):
    """
    Export the per-asset losses for a single event of a given source
    model, one CSV file per realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of file names produced by the CSV writer
    """
    loss_types = dstore.get_attr('composite_risk_model', 'loss_types')
    name, ext = export.keyfunc(ekey)
    ass_losses = dstore[name]
    assetcol = dstore['assetcol']
    oq = dstore['oqparam']
    dtlist = [('event_tag', (numpy.string_, 100)), ('year', U32),
              ('aid', U32)] + oq.loss_dt_list()
    elt_dt = numpy.dtype(dtlist)
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    # the source model ID and event ID are encoded in the export key
    sm_id, eid = get_sm_id_eid(ekey[0])
    if sm_id is None:
        return []  # the export key does not select a specific event
    sm_id, eid = int(sm_id), int(eid)
    sm_ids = [sm_id]
    # with insured losses each ratio is a pair (ground, insured)
    zero = [0, 0] if oq.insured_losses else 0
    writer = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    for sm_id in sm_ids:
        rlzs = rlzs_assoc.rlzs_by_smodel[sm_id]
        try:
            event = dstore['events/sm-%04d' % sm_id][eid]
        except KeyError:
            continue  # no events stored for this source model
        [event_tag] = build_etags([event])
        for rlz in rlzs:
            exportname = 'losses-sm=%04d-eid=%d' % (sm_id, eid)
            dest = dstore.build_fname(exportname, rlz, 'csv')
            # accumulate {aid -> {loss_type -> ratio}} over the loss types
            losses_by_aid = AccumDict()
            rlzname = 'rlz-%03d' % rlz.ordinal
            for loss_type in ass_losses[rlzname]:
                data = get_array(ass_losses['%s/%s' % (rlzname, loss_type)],
                                 eid=eid)
                losses_by_aid += group_by_aid(data, loss_type)
            elt = numpy.zeros(len(losses_by_aid), elt_dt)
            elt['event_tag'] = event_tag
            elt['year'] = event['year']
            elt['aid'] = sorted(losses_by_aid)
            for i, aid in numpy.ndenumerate(elt['aid']):
                for loss_type in loss_types:
                    # convert the stored loss ratio into an absolute loss
                    # by multiplying by the asset value
                    value = assetcol[int(aid)].value(loss_type, oq.time_event)
                    loss = value * losses_by_aid[aid].get(loss_type, zero)
                    if oq.insured_losses:
                        elt[loss_type][i] = loss[0]
                        elt[loss_type + '_ins'][i] = loss[1]
                    else:
                        elt[loss_type][i] = loss

            elt.sort(order='event_tag')
            writer.save(elt, dest)
    return writer.getsaved()
# Example 5
def export_agg_losses_ebr(ekey, dstore):
    """
    Export the event loss table for an event-based-risk calculation as a
    single CSV file.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of file names written (empty if there are no ruptures)
    """
    if 'ruptures' not in dstore:
        logging.warning('There are no ruptures in the datastore')
        return []
    agg_losses = dstore['losses_by_event']
    oq = dstore['oqparam']
    lti = oq.lti  # loss type -> column index in the loss array
    # the early return above guarantees that rupture data is present,
    # so the rupture columns are always part of the exported record
    dtlist = ([('event_id', U64), ('rup_id', U32), ('year', U32),
               ('magnitude', F32), ('centroid_lon', F32),
               ('centroid_lat', F32), ('centroid_depth', F32)]
              + oq.loss_dt_list())
    elt_dt = numpy.dtype(dtlist)
    elt = numpy.zeros(len(agg_losses), elt_dt)
    writer = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    events = dstore['events'].value
    events_by_rupid = collections.defaultdict(list)
    for event in events:
        # the rupture serial lives in the upper 32 bits of the event ID
        rupid = event['id'] // TWO32
        events_by_rupid[rupid].append(event)
    year_of = year_dict(events['id'], oq.investigation_time, oq.ses_seed)
    rup_data = {}
    event_by_eid = {}  # eid -> event
    # populate rup_data and event_by_eid
    # TODO: avoid reading the events twice
    for rgetter in getters.gen_rupture_getters(dstore):
        ruptures = rgetter.get_ruptures()
        for ebr in ruptures:
            for event in events_by_rupid[ebr.serial]:
                event_by_eid[event['id']] = event
        rup_data.update(get_rup_data(ruptures))
    for r, row in enumerate(agg_losses):
        rec = elt[r]
        event = event_by_eid[row['eid']]
        rec['event_id'] = eid = event['id']
        rec['year'] = year_of[eid]
        if rup_data:
            rec['rup_id'] = rup_id = event['id'] // TWO32
            (rec['magnitude'], rec['centroid_lon'], rec['centroid_lat'],
             rec['centroid_depth']) = rup_data[rup_id]
        for lt, i in lti.items():
            rec[lt] = row['loss'][i]
    elt.sort(order=['year', 'event_id'])
    dest = dstore.build_fname('elt', '', 'csv')
    writer.save(elt, dest)
    return writer.getsaved()
# Example 6
def export_agg_losses_ebr(ekey, dstore):
    """
    Export the event loss table for an event-based-risk calculation as a
    single CSV file covering all realizations.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of file names written (empty if there are no ruptures)
    """
    if 'ruptures' not in dstore:
        # logging.warn is deprecated since Python 3.3; use logging.warning
        logging.warning('There are no ruptures in the datastore')
        return []
    name, ext = export.keyfunc(ekey)
    agg_losses = dstore[name]
    has_rup_data = 'ruptures' in dstore
    # rupture columns are exported only when rupture data is available
    extra_list = [('magnitude', F32),
                  ('centroid_lon', F32),
                  ('centroid_lat', F32),
                  ('centroid_depth', F32)] if has_rup_data else []
    oq = dstore['oqparam']
    lti = oq.lti  # loss type -> column index in the loss array
    dtlist = ([('event_id', U64), ('rup_id', U32), ('year', U32),
               ('rlzi', U16)] + extra_list + oq.loss_dt_list())
    elt_dt = numpy.dtype(dtlist)
    elt = numpy.zeros(len(agg_losses), elt_dt)
    writer = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    the_events = dstore['events'].value
    all_events = group_array(the_events, 'grp_id')
    rup_data = {}
    event_by_eid = {}  # eid -> event
    # populate rup_data and event_by_eid
    ruptures_by_grp = calc.get_ruptures_by_grp(dstore)
    # TODO: avoid reading the events twice
    for grp_id, events in all_events.items():
        for event in events:
            event_by_eid[event['eid']] = event
        if has_rup_data:
            ruptures = ruptures_by_grp.get(grp_id, [])
            rup_data.update(get_rup_data(ruptures))
    for r, row in enumerate(agg_losses):
        rec = elt[r]
        event = event_by_eid[row['eid']]
        rec['event_id'] = event['eid']
        rec['year'] = event['year']
        rec['rlzi'] = row['rlzi']
        if rup_data:
            rec['rup_id'] = rup_id = event['rup_id']
            (rec['magnitude'], rec['centroid_lon'], rec['centroid_lat'],
             rec['centroid_depth']) = rup_data[rup_id]
        for lt, i in lti.items():
            rec[lt] = row['loss'][i]
    elt.sort(order=['year', 'event_id', 'rlzi'])
    dest = dstore.build_fname('agg_losses', 'all', 'csv')
    writer.save(elt, dest)
    return writer.getsaved()