def execute(self):
     """
     Compute risk from GMFs or ruptures depending on what is stored.

     If the datastore has no 'gmf_data' dataset, the GMFs are produced
     on the fly from the stored ruptures via ``start_ebrisk``;
     otherwise the stored GMF rows are processed directly with
     ``event_based_risk``. Always returns 1.
     """
     if 'gmf_data' not in self.datastore:  # start from ruptures
         srcfilter = self.src_filter()
         smap = parallel.Starmap(start_ebrisk, h5=self.datastore.hdf5)
         # share the large read-only inputs with the workers through
         # the monitor instead of pickling them into every task
         smap.monitor.save('srcfilter', srcfilter)
         smap.monitor.save('crmodel', self.crmodel)
         smap.monitor.save('rlz_id', self.rlzs)
         for rg in getters.get_rupture_getters(
                 self.datastore, self.oqparam.concurrent_tasks,
                 srcfilter=srcfilter):
             smap.submit((rg, self.param))
         smap.reduce(self.agg_dicts)
         # sanity check: if gmf_info/gmfbytes is empty, no task stored
         # any ground motion fields
         gmf_bytes = self.datastore['gmf_info']['gmfbytes']
         if len(gmf_bytes) == 0:
             raise RuntimeError(
                 'No GMFs were generated, perhaps they were '
                 'all below the minimum_intensity threshold')
         logging.info(
             'Produced %s of GMFs', general.humansize(gmf_bytes.sum()))
     else:  # start from GMFs
         eids = self.datastore['gmf_data/eid'][:]
         logging.info('Processing {:_d} rows of gmf_data'.format(len(eids)))
         self.datastore.swmr_on()  # crucial!
         smap = parallel.Starmap(
             event_based_risk, self.gen_args(eids), h5=self.datastore.hdf5)
         smap.monitor.save('assets', self.assetcol.to_dframe())
         smap.monitor.save('crmodel', self.crmodel)
         smap.monitor.save('rlz_id', self.rlzs)
         smap.reduce(self.agg_dicts)
     return 1
Example #2
0
def export_ruptures_csv(ekey, dstore):
    """
    Export the stored ruptures as a tab-separated CSV file.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    if 'scenario' in oq.calculation_mode:
        return []  # nothing to export for scenario calculations
    dest = dstore.export_path('ruptures.csv')
    header = ['rupid', 'multiplicity', 'mag', 'centroid_lon', 'centroid_lat',
              'centroid_depth', 'trt', 'strike', 'dip', 'rake', 'boundary']
    rows = []
    for getter in get_rupture_getters(dstore):
        arr = calc.RuptureData(getter.trt, getter.rlzs_by_gsim).to_array(
            list(getter))
        rows.extend(
            (rec['rup_id'], rec['multiplicity'], rec['mag'],
             rec['lon'], rec['lat'], rec['depth'],
             getter.trt, rec['strike'], rec['dip'], rec['rake'],
             rec['boundary'])
            for rec in arr)
    rows.sort()  # by rupture serial
    comment = 'investigation_time=%s, ses_per_logic_tree_path=%s' % (
        oq.investigation_time, oq.ses_per_logic_tree_path)
    writers.write_csv(dest, rows, header=header, sep='\t', comment=comment)
    return [dest]
Example #3
0
def view_rupture(token, dstore):
    """
    Show a rupture with its geometry
    """
    ridx = int(token.split(':')[1])
    rupdicts = [getter.get_rupdict()
                for getter in get_rupture_getters(
                    dstore, slc=slice(ridx, ridx + 1))]
    return str(rupdicts)
Example #4
0
    def execute(self):
        """
        Build (or import) the ruptures and, if requested, compute the
        ground motion fields in parallel.

        :returns: the accumulator produced by ``agg_dicts`` (an empty
            dict when there is nothing to compute)
        """
        oq = self.oqparam
        dstore = self.datastore
        self.set_param()
        self.offset = 0
        if oq.hazard_calculation_id:  # from ruptures
            dstore.parent = datastore.read(oq.hazard_calculation_id)
        elif hasattr(self, 'csm'):  # from sources
            self.build_events_from_sources()
            # stop early if neither GMFs nor hazard curves are wanted
            if (oq.ground_motion_fields is False
                    and oq.hazard_curves_from_gmfs is False):
                return {}
        elif 'rupture_model' not in oq.inputs:
            logging.warning(
                'There is no rupture_model, the calculator will just '
                'import data without performing any calculation')
            fake = logictree.FullLogicTree.fake()
            dstore['full_lt'] = fake  # needed to expose the outputs
            dstore['weights'] = [1.]
            return {}
        else:  # scenario
            self._read_scenario_ruptures()
            if (oq.ground_motion_fields is False
                    and oq.hazard_curves_from_gmfs is False):
                return {}

        if oq.ground_motion_fields:
            # create the empty datasets that the workers will fill
            imts = oq.get_primary_imtls()
            nrups = len(dstore['ruptures'])
            base.create_gmf_data(dstore, imts, oq.get_sec_imts())
            dstore.create_dset('gmf_data/sigma_epsilon', sig_eps_dt(oq.imtls))
            dstore.create_dset('gmf_data/time_by_rup',
                               time_dt, (nrups, ),
                               fillvalue=None)

        # compute_gmfs in parallel
        nr = len(dstore['ruptures'])
        logging.info('Reading {:_d} ruptures'.format(nr))
        rgetters = get_rupture_getters(dstore,
                                       oq.concurrent_tasks * 1.25,
                                       srcfilter=self.srcfilter)
        allargs = [(rgetter, self.param) for rgetter in rgetters]
        # enable HDF5 SWMR mode before forking off the tasks
        dstore.swmr_on()
        smap = parallel.Starmap(self.core_task.__func__,
                                allargs,
                                h5=dstore.hdf5)
        smap.monitor.save('srcfilter', self.srcfilter)
        acc = smap.reduce(self.agg_dicts, self.acc0())
        if 'gmf_data' not in dstore:
            return acc
        if oq.ground_motion_fields:
            with self.monitor('saving avg_gmf', measuremem=True):
                self.save_avg_gmf()
        return acc
Example #5
0
def export_agg_losses_ebr(ekey, dstore):
    """
    Export the aggregate losses by event of an event based risk
    calculation as a CSV file, one row per event.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of exported file names (empty if no ruptures)
    """
    if 'ruptures' not in dstore:
        # logging.warn is deprecated (removed in Python 3.13);
        # logging.warning is the supported spelling
        logging.warning('There are no ruptures in the datastore')
        return []
    name, ext = export.keyfunc(ekey)
    agg_losses = dstore['losses_by_event']
    # NOTE(review): always True here because of the early return above
    has_rup_data = 'ruptures' in dstore
    extra_list = [('magnitude', F32), ('centroid_lon', F32),
                  ('centroid_lat', F32),
                  ('centroid_depth', F32)] if has_rup_data else []
    oq = dstore['oqparam']
    lti = oq.lti
    dtlist = ([('event_id', U64), ('rup_id', U32), ('year', U32),
               ('rlzi', U16)] + extra_list + oq.loss_dt_list())
    elt_dt = numpy.dtype(dtlist)
    elt = numpy.zeros(len(agg_losses), elt_dt)
    writer = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    events = dstore['events'].value
    events_by_rupid = collections.defaultdict(list)
    for event in events:
        # eid // TWO32 recovers the rupture id (presumably the high
        # 32 bits of the event id) — grouping events per rupture
        rupid = event['eid'] // TWO32
        events_by_rupid[rupid].append(event)
    year_of = year_dict(events['eid'], oq.investigation_time, oq.ses_seed)
    rup_data = {}
    event_by_eid = {}  # eid -> event
    # populate rup_data and event_by_eid
    # TODO: avoid reading the events twice
    for rgetter in getters.get_rupture_getters(dstore):
        for ebr in rgetter:
            for event in events_by_rupid[ebr.serial]:
                event_by_eid[event['eid']] = event
        if has_rup_data:
            rup_data.update(get_rup_data(rgetter))
    # fill one record per aggregate loss row
    for r, row in enumerate(agg_losses):
        rec = elt[r]
        event = event_by_eid[row['eid']]
        rec['event_id'] = eid = event['eid']
        rec['year'] = year_of[eid]
        rec['rlzi'] = row['rlzi']
        if rup_data:
            rec['rup_id'] = rup_id = event['eid'] // TWO32
            (rec['magnitude'], rec['centroid_lon'], rec['centroid_lat'],
             rec['centroid_depth']) = rup_data[rup_id]
        for lt, i in lti.items():
            rec[lt] = row['loss'][i]
    elt.sort(order=['year', 'event_id', 'rlzi'])
    dest = dstore.build_fname('agg_losses', 'all', 'csv')
    writer.save(elt, dest)
    return writer.getsaved()
Example #6
0
def export_gmf_scenario_csv(ekey, dstore):
    """
    Export the GMFs of a single rupture, one CSV file per realization
    and intensity measure type.

    :param ekey: export key; the first element must end with '/rup-XXX'
    :param dstore: datastore object
    :raises ValueError: if the export key has no valid 'rup-XXX' suffix
    """
    what = ekey[0].split('/')
    if len(what) == 1:
        # raw string: '\d' in a plain literal is an invalid escape
        # sequence (SyntaxWarning since Python 3.12); the value is the same
        raise ValueError(r'Missing "/rup-\d+"')
    oq = dstore['oqparam']
    csm_info = dstore['csm_info']
    rlzs_assoc = csm_info.get_rlzs_assoc()
    num_ruptures = len(dstore['ruptures'])
    imts = list(oq.imtls)
    mo = re.match(r'rup-(\d+)$', what[1])
    if mo is None:
        raise ValueError(
            r"Invalid format: %r does not match 'rup-(\d+)$'" % what[1])
    ridx = int(mo.group(1))
    assert 0 <= ridx < num_ruptures, ridx
    # for scenario there is an unique grp_id=0
    [[ebr]] = get_rupture_getters(dstore, slice(ridx, ridx + 1))
    rlzs_by_gsim = rlzs_assoc.get_rlzs_by_gsim(0)
    min_iml = calc.fix_minimum_intensity(oq.minimum_intensity, imts)
    sitecol = dstore['sitecol'].complete
    getter = GmfGetter(rlzs_by_gsim, [ebr], sitecol, oq, min_iml)
    getter.init()
    eids = (numpy.concatenate([
        eids for eids in ebr.get_eids_by_rlz(rlzs_by_gsim).values()]))
    sids = getter.computers[0].sids
    hazardr = getter.get_hazard()
    rlzs = rlzs_assoc.realizations
    # one column per event, plus lon/lat of the affected sites
    fields = ['eid-%03d' % eid for eid in eids]
    dt = numpy.dtype([(f, F32) for f in fields])
    mesh = numpy.zeros(len(sids), [('lon', F64), ('lat', F64)])
    mesh['lon'] = sitecol.lons[sids]
    mesh['lat'] = sitecol.lats[sids]
    writer = writers.CsvWriter(fmt='%.5f')
    for rlzi in range(len(rlzs)):
        hazard = hazardr[rlzi]
        for imti, imt in enumerate(imts):
            gmfs = numpy.zeros(len(sids), dt)
            for s, sid in enumerate(sids):
                for rec in hazard[sid]:
                    event = 'eid-%03d' % rec['eid']
                    gmfs[s][event] = rec['gmv'][imti]
            dest = dstore.build_fname(
                'gmf', 'rup-%s-rlz-%s-%s' % (ebr.serial, rlzi, imt), 'csv')
            data = util.compose_arrays(mesh, gmfs)
            writer.save(data, dest)
    return writer.getsaved()
Example #7
0
def export_ruptures_xml(ekey, dstore):
    """
    Export the ruptures grouped by grp_id in SES XML format.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    fmt = ekey[-1]
    oq = dstore['oqparam']
    num_ses = oq.ses_per_logic_tree_path
    mesh = get_mesh(dstore['sitecol'])
    ruptures_by_grp = {}
    for rg in get_rupture_getters(dstore):
        exported = []
        for ebr in rg:
            exported.append(ebr.export(mesh, rg.rlzs_by_gsim, num_ses))
        if exported:  # skip empty groups
            ruptures_by_grp[rg.grp_id] = exported
    dest = dstore.export_path('ses.' + fmt)
    writer = hazard_writers.SESXMLWriter(dest)
    writer.serialize(ruptures_by_grp, oq.investigation_time)
    return [dest]
Example #8
0
def export_ruptures_xml(ekey, dstore):
    """
    Export the ruptures grouped by trt_smr in SES XML format.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    fmt = ekey[-1]
    oq = dstore['oqparam']
    events = group_array(dstore['events'][()], 'rup_id')
    ruptures_by_grp = AccumDict(accum=[])
    for rg in get_rupture_getters(dstore):
        # accessing the key materializes it even for empty getters
        exported = ruptures_by_grp[rg.trt_smr]
        for proxy in rg.get_proxies():
            events_by_ses = group_array(events[proxy['id']], 'ses_id')
            exported.append(proxy.to_ebr(rg.trt).export(events_by_ses))
    dest = dstore.export_path('ses.' + fmt)
    writer = hazard_writers.SESXMLWriter(dest)
    writer.serialize(ruptures_by_grp, oq.investigation_time)
    return [dest]
Example #9
0
 def get_rupture_getters(self):
     """
     Build the rupture getters for this calculation, reading from the
     parent datastore when there is one.

     :returns: a list of RuptureGetters
     """
     # prefer the parent datastore (precomputed hazard) when available
     dstore = (self.datastore.parent
               if self.datastore.parent else self.datastore)
     hdf5cache = dstore.hdf5cache()
     # make sure the rupture geometries are available in the cache file
     # before the getters are built against it
     with hdf5.File(hdf5cache, 'r+') as cache:
         if 'rupgeoms' not in cache:
             dstore.hdf5.copy('rupgeoms', cache)
     rgetters = get_rupture_getters(dstore,
                                    split=self.oqparam.concurrent_tasks,
                                    hdf5cache=hdf5cache)
     # self.E, when set, is the known number of events
     num_events = self.E if hasattr(self, 'E') else len(dstore['events'])
     num_ruptures = len(dstore['ruptures'])
     logging.info('Found {:,d} ruptures and {:,d} events'.format(
         num_ruptures, num_events))
     if self.datastore.parent:
         self.datastore.parent.close()
     return rgetters
    def run_calc(self):
        """
        Run a calculation and return results (reinvented from openquake.calculators.base)

        Sets ``self.args``, ``self.mon`` and ``self.dstore`` as a side
        effect; does not submit the tasks itself.
        """

        with self.calculator._monitor:
            self.calculator._monitor.username = ''
            try:
                # Pre-execute setups
                self.calculator.pre_execute()

                #self.calculator.datastore.swmr_on()
                oq = self.calculator.oqparam
                dstore = self.calculator.datastore
                self.calculator.set_param()
                self.calculator.offset = 0

                # Source model
                print('self.__dict__ = ')
                print(self.calculator.__dict__)
                if oq.hazard_calculation_id:  # from ruptures
                    # NOTE(review): reads via the datastore instance;
                    # confirm this matches the module-level
                    # datastore.read(calc_id) used elsewhere
                    dstore.parent = self.calculator.datastore.read(
                        oq.hazard_calculation_id)
                elif hasattr(self.calculator, 'csm'):  # from sources
                    # NOTE(review): looks like a typo for
                    # self.calculator.build_events_from_sources()
                    # (cf. the commented-out call below) — confirm
                    self.calculator_build_events_from_sources()
                    #self.calculator.build_events_from_sources()
                    if (oq.ground_motion_fields is False
                            and oq.hazard_curves_from_gmfs is False):
                        return {}
                elif 'rupture_model' not in oq.inputs:
                    logging.warning(
                        'There is no rupture_model, the calculator will just '
                        'import data without performing any calculation')
                    fake = logictree.FullLogicTree.fake()
                    dstore['full_lt'] = fake  # needed to expose the outputs
                    dstore['weights'] = [1.]
                    return {}
                else:  # scenario
                    self.calculator._read_scenario_ruptures()
                    if (oq.ground_motion_fields is False
                            and oq.hazard_curves_from_gmfs is False):
                        return {}

                # Intensity measure models
                if oq.ground_motion_fields:
                    # create the empty GMF datasets to be filled later
                    imts = oq.get_primary_imtls()
                    nrups = len(dstore['ruptures'])
                    base.create_gmf_data(dstore, imts, oq.get_sec_imts())
                    dstore.create_dset('gmf_data/sigma_epsilon',
                                       getters.sig_eps_dt(oq.imtls))
                    dstore.create_dset('gmf_data/time_by_rup',
                                       getters.time_dt, (nrups, ),
                                       fillvalue=None)

                # Prepare inputs for GmfGetter
                nr = len(dstore['ruptures'])
                logging.info('Reading {:_d} ruptures'.format(nr))
                rgetters = getters.get_rupture_getters(
                    dstore,
                    oq.concurrent_tasks * 1.25,
                    srcfilter=self.calculator.srcfilter)
                args = [(rgetter, self.calculator.param)
                        for rgetter in rgetters]
                mon = performance.Monitor()
                mon.version = version
                mon.config = config
                rcvr = 'tcp://%s:%s' % (config.dbserver.listen,
                                        config.dbserver.receiver_ports)
                # NOTE(review): __enter__ is called manually and the
                # matching __exit__ is never called here — confirm the
                # socket lifecycle is handled elsewhere
                skt = zeromq.Socket(rcvr, zeromq.zmq.PULL, 'bind').__enter__()
                mon.backurl = 'tcp://%s:%s' % (config.dbserver.host, skt.port)
                mon = mon.new(operation='total ' +
                              self.calculator.core_task.__func__.__name__,
                              measuremem=True)
                mon.weight = getattr(args[0], 'weight',
                                     1.)  # used in task_info
                mon.task_no = 1  # initialize the task number
                # list += tuple extends in place: mon becomes the last
                # element of args
                args += (mon, )

                self.args = args
                self.mon = mon
                self.dstore = dstore

            finally:
                # executed even when an exception escapes the try block
                print('FetchOpenQuake: OpenQuake Hazard Calculator defined.')