Example #1
 def test_get_far_away_parameter(self):
     oqparam = mock.Mock()
     oqparam.base_path = '/'
     oqparam.maximum_distance = 100
     oqparam.sites = [(1.0, 0)]
     oqparam.inputs = dict(site_model=sitemodel())
     with mock.patch('logging.warn') as warn:
         readinput.get_site_collection(oqparam)
     # check that the warning was raised
     self.assertEqual(
         warn.call_args[0][0], 'The site parameter associated to '
         '<Latitude=0.000000, Longitude=1.000000, Depth=0.0000> '
         'came from a distance of 111 km!')
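Several of these tests pass oqparam.inputs = dict(site_model=sitemodel()), but the sitemodel() helper itself is not shown on this page. The sketch below is only an assumption of what such a fixture might look like: a hypothetical helper that writes a minimal NRML 0.4 site model to a temporary file and returns its path (the field values are illustrative, not the project's actual test data).

import tempfile

def sitemodel():
    # hypothetical fixture: minimal NRML 0.4 site model content (values assumed)
    content = b'''<?xml version="1.0" encoding="utf-8"?>
<nrml xmlns="http://openquake.org/xmlns/nrml/0.4">
    <siteModel>
        <site lon="0.0" lat="0.0" vs30="760.0" vs30Type="measured"
              z1pt0="100.0" z2pt5="5.0"/>
    </siteModel>
</nrml>
'''
    tmp = tempfile.NamedTemporaryFile(suffix='.xml', delete=False)
    tmp.write(content)
    tmp.close()
    return tmp.name  # the tests then reference it via oqparam.inputs['site_model']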
Example #2
    def read_exposure_sitecol(self):
        """
        Read the exposure (if any) and then the site collection, possibly
        extracted from the exposure.
        """
        inputs = self.oqparam.inputs

        if 'gmfs' in inputs and self.oqparam.sites:
            haz_sitecol = self.sitecol = readinput.get_site_collection(
                self.oqparam)
        if 'scenario_' in self.oqparam.calculation_mode:
            self.gmfs = get_gmfs(self)
            haz_sitecol = self.sitecol
        if 'exposure' in inputs:
            logging.info('Reading the exposure')
            with self.monitor('reading exposure', autoflush=True):
                self.exposure = readinput.get_exposure(self.oqparam)
                self.sitecol, self.assets_by_site = (
                    readinput.get_sitecol_assets(self.oqparam, self.exposure))
                self.cost_types = self.exposure.cost_types
                self.taxonomies = numpy.array(
                    sorted(self.exposure.taxonomies), '|S100')
            num_assets = self.count_assets()
            if self.datastore.parent:
                haz_sitecol = self.datastore.parent['sitecol']
            elif 'gmfs' in inputs:
                pass  # haz_sitecol is already defined
            # TODO: think about the case hazard_curves in inputs
            else:
                haz_sitecol = None
            if haz_sitecol is not None and haz_sitecol != self.sitecol:
                with self.monitor('assoc_assets_sites'):
                    self.sitecol, self.assets_by_site = \
                        self.assoc_assets_sites(haz_sitecol.complete)
                ok_assets = self.count_assets()
                num_sites = len(self.sitecol)
                logging.warn('Associated %d assets to %d sites, %d discarded',
                             ok_assets, num_sites, num_assets - ok_assets)
        elif (self.datastore.parent and 'exposure' in
              OqParam.from_(self.datastore.parent.attrs).inputs):
            logging.info('Re-using the already imported exposure')
        else:  # no exposure
            logging.info('Reading the site collection')
            with self.monitor('reading site collection', autoflush=True):
                self.sitecol = readinput.get_site_collection(self.oqparam)

        # save mesh and asset collection
        self.save_mesh()
        if hasattr(self, 'assets_by_site'):
            self.assetcol = riskinput.build_asset_collection(
                self.assets_by_site, self.oqparam.time_event)
Example #3
 def test_get_far_away_parameter(self):
     oqparam = mock.Mock()
     oqparam.hazard_calculation_id = None
     oqparam.base_path = '/'
     oqparam.maximum_distance = 100
     oqparam.max_site_model_distance = 5
     oqparam.sites = [(1.0, 0, 0), (2.0, 0, 0)]
     oqparam.inputs = dict(site_model=sitemodel())
     with mock.patch('logging.warn') as warn:
         readinput.get_site_collection(oqparam)
     # check that the warning was raised
     self.assertEqual(
         warn.call_args[0],
         ('Association to %d km from site (%s %s)', 222, 2.0, 0.0))
Example #4
 def test_get_far_away_parameter(self):
     oqparam = mock.Mock()
     oqparam.gsim = valid.GSIM['ToroEtAl2002SHARE']()
     oqparam.hazard_calculation_id = None
     oqparam.base_path = '/'
     oqparam.maximum_distance = 100
     oqparam.max_site_model_distance = 5
     oqparam.region_grid_spacing = None
     oqparam.sites = [(1.0, 0, 0), (2.0, 0, 0)]
     oqparam.inputs = dict(site_model=sitemodel())
     with mock.patch('logging.warn') as warn:
         readinput.get_site_collection(oqparam)
     # check that the warning was raised
     self.assertEqual(len(warn.call_args), 2)
Example #5
 def test_get_far_away_parameter(self):
     oqparam = mock.Mock()
     oqparam.base_path = '/'
     oqparam.maximum_distance = 100
     oqparam.sites = [(1.0, 0)]
     oqparam.inputs = dict(site_model=sitemodel())
     with mock.patch('logging.warn') as warn:
         readinput.get_site_collection(oqparam)
     # check that the warning was raised
     self.assertEqual(
         warn.call_args[0][0],
         'The site parameter associated to '
         '<Latitude=0.000000, Longitude=1.000000, Depth=0.0000> '
         'came from a distance of 111 km!')
Example #6
def save_gmfs(calculator):
    """
    :param calculator: a scenario_risk/damage or event_based_risk calculator
    :returns: a pair (eids, R) where R is the number of realizations
    """
    dstore = calculator.datastore
    oq = calculator.oqparam
    logging.info('Reading gmfs from file')
    if oq.inputs['gmfs'].endswith('.csv'):
        # TODO: check if import_gmfs can be removed
        eids, num_rlzs, calculator.gmdata = import_gmfs(
            dstore, oq.inputs['gmfs'], calculator.sitecol.complete.sids)
        save_gmdata(calculator, calculator.R)
    else:  # XML
        eids, gmfs = readinput.eids, readinput.gmfs
    E = len(eids)
    calculator.eids = eids
    if hasattr(oq, 'number_of_ground_motion_fields'):
        if oq.number_of_ground_motion_fields != E:
            raise RuntimeError('Expected %d ground motion fields, found %d' %
                               (oq.number_of_ground_motion_fields, E))
    else:  # set the number of GMFs from the file
        oq.number_of_ground_motion_fields = E
    # NB: save_gmfs redefine oq.sites in case of GMFs from XML or CSV
    if oq.inputs['gmfs'].endswith('.xml'):
        haz_sitecol = readinput.get_site_collection(oq)
        R, N, E, I = gmfs.shape
        save_gmf_data(dstore, haz_sitecol, gmfs[:, haz_sitecol.sids], eids)
Example #7
 def setUp(self):
     self.oqparam = mock.Mock()
     self.oqparam.inputs = {}
     self.oqparam.sites = [(0, 0), (0, 1)]
     self.oqparam.imtls = {'PGA': None}
     self.oqparam.number_of_ground_motion_fields = 3
     self.sitecol = readinput.get_site_collection(self.oqparam)
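As a hedged companion to this setUp (assuming the returned site collection exposes per-site arrays such as lats, as openquake.hazardlib.site does), a test method built on it could simply verify the collection:

 def test_sitecol_from_points(self):
     # two (lon, lat) pairs were given via oqparam.sites in setUp
     self.assertEqual(len(self.sitecol), 2)
     # lats is an assumed attribute of the site collection
     self.assertEqual(sorted(self.sitecol.lats), [0.0, 1.0])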
Example #8
 def pre_execute(self):
     """
     parse the logic tree and source model input
     """
     self.sitecol = readinput.get_site_collection(self.oqparam)
     self.save_mesh()
     self.gsim_lt = readinput.get_gsim_lt(self.oqparam, [DEFAULT_TRT])
     self.smlt = readinput.get_source_model_lt(self.oqparam)
     parser = source.SourceModelParser(
         UCERFSourceConverter(self.oqparam.investigation_time,
                              self.oqparam.rupture_mesh_spacing))
     [self.source] = parser.parse_sources(
         self.oqparam.inputs["source_model"])
     branches = sorted(self.smlt.branches.items())
     min_mag, max_mag = self.source.min_mag, None
     source_models = []
     for ordinal, (name, branch) in enumerate(branches):
         tm = source.TrtModel(DEFAULT_TRT, [], min_mag, max_mag,
                              ordinal, eff_ruptures=-1)
         sm = source.SourceModel(
             name, branch.weight, [name], [tm], self.gsim_lt, ordinal, 1)
         source_models.append(sm)
     self.csm = source.CompositeSourceModel(
         self.smlt, source_models, set_weight=False)
     self.rup_data = {}
     self.infos = []
Example #9
def _info(name, filtersources, weightsources):
    if name in base.calculators:
        print(textwrap.dedent(base.calculators[name].__doc__.strip()))
    elif name == 'gsims':
        for gs in gsim.get_available_gsims():
            print(gs)
    elif name.endswith('.xml'):
        print(nrml.read(name).to_str())
    elif name.endswith(('.ini', '.zip')):
        oqparam = readinput.get_oqparam(name)
        if 'exposure' in oqparam.inputs:
            expo = readinput.get_exposure(oqparam)
            sitecol, assets_by_site = readinput.get_sitecol_assets(
                oqparam, expo)
        elif filtersources or weightsources:
            sitecol = readinput.get_site_collection(oqparam)
        else:
            sitecol = None
        if 'source_model_logic_tree' in oqparam.inputs:
            print('Reading the source model...')
            if weightsources:
                sp = source.SourceFilterWeighter
            elif filtersources:
                sp = source.SourceFilter
            else:
                sp = source.BaseSourceProcessor  # do nothing
            csm = readinput.get_composite_source_model(oqparam, sitecol, sp)
            assoc = csm.get_rlzs_assoc()
            dstore = datastore.Fake(vars(oqparam),
                                    rlzs_assoc=assoc,
                                    composite_source_model=csm,
                                    sitecol=sitecol)
            _print_info(dstore, filtersources, weightsources)
    else:
        print("No info for '%s'" % name)
Example #10
    def read_risk_data(self):
        """
        Read the exposure (if any), the risk model (if any) and then the
        site collection, possibly extracted from the exposure.
        """
        oq = self.oqparam
        with self.monitor('reading site collection', autoflush=True):
            haz_sitecol = readinput.get_site_collection(oq)
        if haz_sitecol is not None:
            logging.info('Read %d hazard site(s)', len(haz_sitecol))
        oq_hazard = (self.datastore.parent['oqparam']
                     if self.datastore.parent else None)
        if 'exposure' in oq.inputs:
            self.read_exposure()
            self.load_riskmodel()  # must be called *after* read_exposure
            num_assets = self.count_assets()
            if self.datastore.parent:
                haz_sitecol = self.datastore.parent['sitecol']
            if haz_sitecol is not None and haz_sitecol != self.sitecol:
                with self.monitor('assoc_assets_sites', autoflush=True):
                    self.sitecol, self.assetcol = \
                        self.assoc_assets_sites(haz_sitecol.complete)
                ok_assets = self.count_assets()
                num_sites = len(self.sitecol)
                logging.warn('Associated %d assets to %d sites, %d discarded',
                             ok_assets, num_sites, num_assets - ok_assets)
        elif oq.job_type == 'risk':
            raise RuntimeError('Missing exposure_file in %(job_ini)s' %
                               oq.inputs)
        else:  # no exposure
            self.load_riskmodel()
            self.sitecol = haz_sitecol

        if oq_hazard:
            parent = self.datastore.parent
            if 'assetcol' in parent:
                check_time_event(oq, parent['assetcol'].time_events)
            if oq_hazard.time_event and oq_hazard.time_event != oq.time_event:
                raise ValueError(
                    'The risk configuration file has time_event=%s but the '
                    'hazard was computed with time_event=%s' %
                    (oq.time_event, oq_hazard.time_event))

        if self.oqparam.job_type == 'risk':
            taxonomies = set(self.taxonomies)

            # check that we are covering all the taxonomies in the exposure
            missing = taxonomies - set(self.riskmodel.taxonomies)
            if self.riskmodel and missing:
                raise RuntimeError('The exposure contains the taxonomies %s '
                                   'which are not in the risk model' % missing)

            # same check for the consequence models, if any
            consequence_models = riskmodels.get_risk_models(
                self.oqparam, 'consequence')
            for lt, cm in consequence_models.items():
                missing = taxonomies - set(cm)
                if missing:
                    raise ValueError('Missing consequenceFunctions for %s' %
                                     ' '.join(missing))
Example #11
 def test_get_far_away_parameter(self):
     oqparam = mock.Mock()
     oqparam.gsim = valid.GSIM['ToroEtAl2002SHARE']()
     oqparam.hazard_calculation_id = None
     oqparam.base_path = '/'
     oqparam.maximum_distance = 100
     oqparam.max_site_model_distance = 5
     oqparam.sites = [(1.0, 0, 0), (2.0, 0, 0)]
     oqparam.inputs = dict(site_model=sitemodel())
     with mock.patch('logging.warn') as warn:
         readinput.get_site_collection(oqparam)
     # check that the warning was raised
     self.assertEqual(
         warn.call_args[0],
         ('Association to (%.1f %.1f) from site #%d (%.1f %.1f) %d km', 0.0,
          0.0, 1, 2.0, 0.0, 222))
Example #12
    def test_one_rlz(self):
        oqparam = tests.get_oqparam('classical_job.ini')
        # the example has number_of_logic_tree_samples = 1
        sitecol = readinput.get_site_collection(oqparam)
        csm = readinput.get_composite_source_model(oqparam, sitecol)
        self.assertEqual(
            str(csm[0].gsim_lt), '''\
<GsimLogicTree
Active Shallow Crust,b1,SadighEtAl1997,w=0.5
Active Shallow Crust,b2,ChiouYoungs2008,w=0.5
Subduction Interface,b3,SadighEtAl1997,w=1.0>''')
        assoc = csm.get_rlzs_assoc(
            lambda trtmod: sum(src.count_ruptures() for src in trtmod.sources))
        [rlz] = assoc.realizations
        self.assertEqual(
            assoc.gsim_by_trt[rlz.ordinal], {
                'Subduction Interface': 'SadighEtAl1997',
                'Active Shallow Crust': 'ChiouYoungs2008'
            })
        # ignoring the end of the tuple, with the uid field
        self.assertEqual(rlz.ordinal, 0)
        self.assertEqual(rlz.sm_lt_path, ('b1', 'b5', 'b8'))
        self.assertEqual(rlz.gsim_lt_path, ('b2', 'b3'))
        self.assertEqual(rlz.weight, 1.)
        self.assertEqual(
            str(assoc),
            "<RlzsAssoc(2)\n0,SadighEtAl1997: ['<0,b1_b5_b8,b2_b3,w=1.0>']\n"
            "1,ChiouYoungs2008: ['<0,b1_b5_b8,b2_b3,w=1.0>']>")
Example #13
    def pre_execute(self):
        """
        parse the logic tree and source model input
        """
        self.sitecol = readinput.get_site_collection(self.oqparam)
        self.gsim_lt = readinput.get_gsim_lt(self.oqparam, [DEFAULT_TRT])
        self.smlt = readinput.get_source_model_lt(self.oqparam)
        parser = source.SourceModelParser(
            UCERFClassicalSourceConverter(self.oqparam.investigation_time,
                                          self.oqparam.rupture_mesh_spacing))
        [self.src_group] = parser.parse_src_groups(
            self.oqparam.inputs["source_model"])
        [src] = self.src_group
        branches = sorted(self.smlt.branches.items())
        source_models = []
        num_gsim_paths = self.gsim_lt.get_num_paths()
        for ordinal, (name, branch) in enumerate(branches):
            sg = copy.copy(self.src_group)
            sg.id = ordinal

            # Update the event set
            src.branch_id = branch.value
            src.idx_set = src.build_idx_set()
            sm = source.SourceModel(
                name, branch.weight, [name], [sg], num_gsim_paths, ordinal, 1)
            source_models.append(sm)
        self.csm = source.CompositeSourceModel(
            self.gsim_lt, self.smlt, source_models, set_weight=False)
        self.rlzs_assoc = self.csm.info.get_rlzs_assoc()
        self.rup_data = {}
        self.num_tiles = 1
        self.infos = {}
Example #14
 def pre_execute(self):
     """
     Associate the assets to the sites and build the riskinputs.
     """
     oq = self.oqparam
     if oq.insured_losses:
         raise ValueError(
             'insured_losses are not supported for classical_risk')
     if 'hazard_curves' in oq.inputs:  # read hazard from file
         haz_sitecol = readinput.get_site_collection(oq)
         self.datastore['poes/grp-00'] = readinput.pmap
         self.save_params()
         self.load_riskmodel()
         self.read_exposure(haz_sitecol)  # define .assets_by_site
         self.datastore['sitecol'] = self.sitecol
         self.datastore['assetcol'] = self.assetcol
         self.datastore['csm_info'] = fake = source.CompositionInfo.fake()
         self.rlzs_assoc = fake.get_rlzs_assoc()
         self.before_export()  # save 'realizations' dataset
     else:  # compute hazard or read it from the datastore
         super().pre_execute()
         if 'poes' not in self.datastore:  # when building short report
             return
     weights = [rlz.weight for rlz in self.rlzs_assoc.realizations]
     self.param = dict(stats=oq.risk_stats(), weights=weights)
     self.riskinputs = self.build_riskinputs('poe')
     self.A = len(self.assetcol)
     self.L = len(self.riskmodel.loss_types)
     self.I = oq.insured_losses + 1
     self.S = len(oq.risk_stats())
Example #15
 def pre_execute(self):
     """
     parse the logic tree and source model input
     """
     self.sitecol = readinput.get_site_collection(self.oqparam)
     self.save_mesh()
     self.gsim_lt = readinput.get_gsim_lt(self.oqparam, [DEFAULT_TRT])
     self.smlt = readinput.get_source_model_lt(self.oqparam)
     parser = source.SourceModelParser(
         UCERFSourceConverter(self.oqparam.investigation_time,
                              self.oqparam.rupture_mesh_spacing))
      [self.source] = parser.parse_sources(
          self.oqparam.inputs["source_model"])
     branches = sorted(self.smlt.branches.items())
     min_mag, max_mag = self.source.min_mag, None
     source_models = []
     num_gsim_paths = self.gsim_lt.get_num_paths()
     for ordinal, (name, branch) in enumerate(branches):
         tm = source.TrtModel(DEFAULT_TRT, [],
                              min_mag,
                              max_mag,
                              ordinal,
                              eff_ruptures=-1)
         sm = source.SourceModel(name, branch.weight, [name], [tm],
                                 num_gsim_paths, ordinal, 1)
         source_models.append(sm)
     self.csm = source.CompositeSourceModel(self.gsim_lt,
                                            self.smlt,
                                            source_models,
                                            set_weight=False)
     self.rup_data = {}
     self.infos = []
Example #16
    def test(self):
        fname = general.gettemp('''\
<?xml version="1.0" encoding="utf-8"?>
<nrml xmlns:gml="http://www.opengis.net/gml"
      xmlns="http://openquake.org/xmlns/nrml/0.4">

    <!-- Spectral Acceleration (SA) example -->
    <hazardCurves sourceModelTreePath="b1_b2_b4" gsimTreePath="b1_b2" investigationTime="50.0" IMT="SA" saPeriod="0.025" saDamping="5.0">
        <IMLs>5.0000e-03 7.0000e-03 1.3700e-02</IMLs>

        <hazardCurve>
            <gml:Point>
                <gml:pos>-122.5000 37.5000</gml:pos>
            </gml:Point>
            <poEs>9.8728e-01 9.8266e-01 9.4957e-01</poEs>
        </hazardCurve>
        <hazardCurve>
            <gml:Point>
                <gml:pos>-123.5000 37.5000</gml:pos>
            </gml:Point>
            <poEs>9.8727e-02 9.8265e-02 9.4956e-02</poEs>
        </hazardCurve>
    </hazardCurves>

    <!-- Basic example, using PGA as IMT -->
    <hazardCurves sourceModelTreePath="b1_b2_b3" gsimTreePath="b1_b7" investigationTime="50.0" IMT="PGA">
        <IMLs>5.0000e-03 7.0000e-03 1.3700e-02 3.3700e-02</IMLs>

        <hazardCurve>
            <gml:Point>
                <gml:pos>-122.5000 37.5000</gml:pos>
            </gml:Point>
            <poEs>9.8728e-01 9.8226e-01 9.4947e-01 9.2947e-01</poEs>
        </hazardCurve>
        <hazardCurve>
            <gml:Point>
                <gml:pos>-123.5000 37.5000</gml:pos>
            </gml:Point>
            <poEs>9.8728e-02 9.8216e-02 9.4945e-02 9.2947e-02</poEs>
        </hazardCurve>
    </hazardCurves>
</nrml>
''',
                                suffix='.xml')
        oqparam = object.__new__(oqvalidation.OqParam)
        oqparam.inputs = dict(hazard_curves=fname)
        sitecol = readinput.get_site_collection(oqparam)
        self.assertEqual(len(sitecol), 2)
        self.assertEqual(sorted(oqparam.hazard_imtls.items()),
                         [('PGA', [0.005, 0.007, 0.0137, 0.0337]),
                          ('SA(0.025)', [0.005, 0.007, 0.0137])])
        hcurves = readinput.pmap.convert(oqparam.imtls, 2)
        assert_allclose(
            hcurves['PGA'],
            numpy.array([[0.098728, 0.098216, 0.094945, 0.092947],
                         [0.98728, 0.98226, 0.94947, 0.92947]]))
        assert_allclose(
            hcurves['SA(0.025)'],
            numpy.array([[0.098727, 0.098265, 0.094956],
                         [0.98728, 0.98266, 0.94957]]))
Example #17
    def test_one_rlz(self):
        oqparam = tests.get_oqparam('classical_job.ini')
        # the example has number_of_logic_tree_samples = 1
        sitecol = readinput.get_site_collection(oqparam)
        csm = readinput.get_composite_source_model(oqparam, sitecol)

        # check the attributes of the groups are set
        [grp0, grp1] = csm.src_groups
        for grp in csm.src_groups:
            self.assertEqual(grp.src_interdep, 'indep')
            self.assertEqual(grp.rup_interdep, 'indep')

        self.assertEqual(
            repr(csm.gsim_lt), '''\
<GsimLogicTree
Active Shallow Crust,b1,SadighEtAl1997(),w=0.5
Active Shallow Crust,b2,ChiouYoungs2008(),w=0.5
Subduction Interface,b3,SadighEtAl1997(),w=1.0>''')
        assoc = csm.info.get_rlzs_assoc()
        [rlz] = assoc.realizations
        self.assertEqual(
            assoc.gsim_by_trt[rlz.ordinal], {
                'Subduction Interface': 'SadighEtAl1997()',
                'Active Shallow Crust': 'ChiouYoungs2008()'
            })
        # ignoring the end of the tuple, with the uid field
        self.assertEqual(rlz.ordinal, 0)
        self.assertEqual(rlz.sm_lt_path, ('b1', 'b4', 'b7'))
        self.assertEqual(rlz.gsim_lt_path, ('b2', 'b3'))
        self.assertEqual(rlz.weight, 1.)
        self.assertEqual(
            str(assoc), "<RlzsAssoc(size=2, rlzs=1)\n0,SadighEtAl1997(): "
            "[0]\n1,ChiouYoungs2008(): [0]>")
Example #18
def save_gmfs(calculator):
    """
    :param calculator: a scenario_risk/damage or event_based_risk calculator
    :returns: a pair (eids, R) where R is the number of realizations
    """
    dstore = calculator.datastore
    oq = calculator.oqparam
    logging.info('Reading gmfs from file')
    if oq.inputs['gmfs'].endswith('.csv'):
        # TODO: check if import_gmfs can be removed
        eids = import_gmfs(
            dstore, oq.inputs['gmfs'], calculator.sitecol.complete.sids)
    else:  # XML
        eids, gmfs = readinput.eids, readinput.gmfs
    E = len(eids)
    events = numpy.zeros(E, rupture.events_dt)
    events['id'] = eids
    calculator.eids = eids
    if hasattr(oq, 'number_of_ground_motion_fields'):
        if oq.number_of_ground_motion_fields != E:
            raise RuntimeError(
                'Expected %d ground motion fields, found %d' %
                (oq.number_of_ground_motion_fields, E))
    else:  # set the number of GMFs from the file
        oq.number_of_ground_motion_fields = E
    # NB: save_gmfs redefine oq.sites in case of GMFs from XML or CSV
    if oq.inputs['gmfs'].endswith('.xml'):
        haz_sitecol = readinput.get_site_collection(oq)
        N, E, M = gmfs.shape
        save_gmf_data(dstore, haz_sitecol, gmfs[haz_sitecol.sids],
                      oq.imtls, events)
Example #19
    def test_one_rlz(self):
        oqparam = tests.get_oqparam('classical_job.ini')
        # the example has number_of_logic_tree_samples = 1
        sitecol = readinput.get_site_collection(oqparam)
        csm = readinput.get_composite_source_model(oqparam, sitecol)
        self.assertEqual(str(csm[0].gsim_lt), '''\
<GsimLogicTree
Active Shallow Crust,b1,SadighEtAl1997,w=0.5
Active Shallow Crust,b2,ChiouYoungs2008,w=0.5
Subduction Interface,b3,SadighEtAl1997,w=1.0>''')
        assoc = csm.get_rlzs_assoc(
            lambda trtmod: sum(src.count_ruptures() for src in trtmod.sources))
        [rlz] = assoc.realizations
        self.assertEqual(assoc.gsim_by_trt[rlz.ordinal],
                         {'Subduction Interface': 'SadighEtAl1997',
                          'Active Shallow Crust': 'ChiouYoungs2008'})
        # ignoring the end of the tuple, with the uid field
        self.assertEqual(rlz.ordinal, 0)
        self.assertEqual(rlz.sm_lt_path, ('b1', 'b5', 'b8'))
        self.assertEqual(rlz.gsim_lt_path, ('b2', 'b3'))
        self.assertEqual(rlz.weight, 1.)
        self.assertEqual(
            str(assoc),
            "<RlzsAssoc(2)\n0,SadighEtAl1997: ['<0,b1_b5_b8,b2_b3,w=1.0>']\n"
            "1,ChiouYoungs2008: ['<0,b1_b5_b8,b2_b3,w=1.0>']>")
Example #20
    def read_risk_data(self):
        """
        Read the exposure (if any), the risk model (if any) and then the
        site collection, possibly extracted from the exposure.
        """
        oq = self.oqparam
        with self.monitor('reading site collection', autoflush=True):
            haz_sitecol = readinput.get_site_collection(oq)
        if haz_sitecol is not None:
            logging.info('Read %d hazard site(s)', len(haz_sitecol))

        oq_hazard = (self.datastore.parent['oqparam']
                     if self.datastore.parent else None)
        if 'exposure' in oq.inputs:
            self.read_exposure()
            self.load_riskmodel()  # must be called *after* read_exposure
            num_assets = self.count_assets()
            if self.datastore.parent:
                haz_sitecol = self.datastore.parent['sitecol']
            if haz_sitecol is not None and haz_sitecol != self.sitecol:
                with self.monitor('assoc_assets_sites'):
                    self.sitecol, self.assets_by_site = \
                        self.assoc_assets_sites(haz_sitecol.complete)
                ok_assets = self.count_assets()
                num_sites = len(self.sitecol)
                logging.warn('Associated %d assets to %d sites, %d discarded',
                             ok_assets, num_sites, num_assets - ok_assets)
        elif oq.job_type == 'risk':
            raise RuntimeError(
                'Missing exposure_file in %(job_ini)s' % oq.inputs)
        else:  # no exposure
            self.load_riskmodel()
            self.sitecol = haz_sitecol

        if oq_hazard:
            parent = self.datastore.parent
            if 'assetcol' in parent:
                check_time_event(oq, parent['assetcol'].time_events)
            if oq_hazard.time_event and oq_hazard.time_event != oq.time_event:
                raise ValueError(
                    'The risk configuration file has time_event=%s but the '
                    'hazard was computed with time_event=%s' % (
                        oq.time_event, oq_hazard.time_event))

        # asset collection
        if hasattr(self, 'assets_by_site'):
            self.assetcol = riskinput.AssetCollection(
                self.assets_by_site, self.cost_calculator, oq.time_event,
                time_events=hdf5.array_of_vstr(
                    sorted(self.exposure.time_events)))
        elif hasattr(self, '_assetcol'):
            self.assets_by_site = self.assetcol.assets_by_site()

        if self.oqparam.job_type == 'risk':
            # check that we are covering all the taxonomies in the exposure
            missing = set(self.taxonomies) - set(self.riskmodel.taxonomies)
            if self.riskmodel and missing:
                raise RuntimeError('The exposure contains the taxonomies %s '
                                   'which are not in the risk model' % missing)
Example #21
 def pre_execute(self):
     self.sitecol = readinput.get_site_collection(self.oqparam)
     self.gsim = readinput.get_gsim(self.oqparam)
     self.imts = readinput.get_imts(self.oqparam)
     self.rupture = readinput.get_rupture(self.oqparam)
     self.rupture_tags = [  # used in the export phase
         'tag%d' % i
         for i in range(self.oqparam.number_of_ground_motion_fields)]
Example #22
 def pre_execute(self):
     self.sitecol = readinput.get_site_collection(self.oqparam)
     [self.gsim] = readinput.get_gsims(self.oqparam)
     self.imts = readinput.get_imts(self.oqparam)
     self.rupture = readinput.get_rupture(self.oqparam)
     self.rupture_tags = [  # used in the export phase
         'tag%d' % i
         for i in range(self.oqparam.number_of_ground_motion_fields)]
Example #23
    def read_exposure_sitecol(self):
        """
        Read the exposure (if any) and then the site collection, possibly
        extracted from the exposure.
        """
        if 'exposure' in self.oqparam.inputs:
            logging.info('Reading the exposure')
            with self.monitor('reading exposure', autoflush=True):
                self.exposure = readinput.get_exposure(self.oqparam)
                self.sitecol, self.assets_by_site = (
                    readinput.get_sitecol_assets(self.oqparam, self.exposure))
                self.cost_types = self.exposure.cost_types
                self.taxonomies = numpy.array(
                    sorted(self.exposure.taxonomies), '|S100')
            num_assets = self.count_assets()
            mesh = readinput.get_mesh(self.oqparam)
            if self.datastore.parent:
                parent_mesh = self.datastore.parent['sitemesh'].value
                if mesh is None:
                    mesh = Mesh(parent_mesh['lon'], parent_mesh['lat'])
            if mesh is not None:
                sites = readinput.get_site_collection(self.oqparam, mesh)
                with self.monitor('assoc_assets_sites'):
                    self.sitecol, self.assets_by_site = \
                        self.assoc_assets_sites(sites)
                ok_assets = self.count_assets()
                num_sites = len(self.sitecol)
                logging.warn('Associated %d assets to %d sites, %d discarded',
                             ok_assets, num_sites, num_assets - ok_assets)

            if (self.is_stochastic and self.datastore.parent and
                    self.datastore.parent['sitecol'] != self.sitecol):
                logging.warn(
                    'The hazard sites are different from the risk sites %s!=%s'
                    % (self.datastore.parent['sitecol'], self.sitecol))
        else:  # no exposure
            logging.info('Reading the site collection')
            with self.monitor('reading site collection', autoflush=True):
                self.sitecol = readinput.get_site_collection(self.oqparam)

        # save mesh and asset collection
        self.save_mesh()
        if hasattr(self, 'assets_by_site'):
            self.assetcol = riskinput.build_asset_collection(
                self.assets_by_site, self.oqparam.time_event)
Example #24
 def pre_execute(self, pre_calculator=None):
     """
     Check if there is a previous calculation ID.
     If yes, read the inputs by retrieving the previous calculation;
     if not, read the inputs directly.
     """
     oq = self.oqparam
     if 'gmfs' in oq.inputs:  # read hazard from file
         assert not oq.hazard_calculation_id, (
             'You cannot use --hc together with gmfs_file')
         self.read_inputs()
         save_gmfs(self)
     elif 'hazard_curves' in oq.inputs:  # read hazard from file
         assert not oq.hazard_calculation_id, (
             'You cannot use --hc together with hazard_curves')
         haz_sitecol = readinput.get_site_collection(oq)
         # NB: horrible: get_site_collection calls get_pmap_from_nrml
         # that sets oq.investigation_time, so it must be called first
         self.load_riskmodel()  # must be after get_site_collection
         self.read_exposure(haz_sitecol)  # define .assets_by_site
         self.datastore['poes/grp-00'] = readinput.pmap
         self.datastore['sitecol'] = self.sitecol
         self.datastore['assetcol'] = self.assetcol
         self.datastore['csm_info'] = fake = source.CompositionInfo.fake()
         self.rlzs_assoc = fake.get_rlzs_assoc()
     elif oq.hazard_calculation_id:
         parent = datastore.read(oq.hazard_calculation_id)
         check_precalc_consistency(
             oq.calculation_mode,
             parent['oqparam'].calculation_mode)
         self.datastore.parent = parent
         # copy missing parameters from the parent
         params = {name: value for name, value in
                   vars(parent['oqparam']).items()
                   if name not in vars(self.oqparam)}
         self.save_params(**params)
         self.read_inputs()
         oqp = parent['oqparam']
         if oqp.investigation_time != oq.investigation_time:
             raise ValueError(
                 'The parent calculation was using investigation_time=%s'
                 ' != %s' % (oqp.investigation_time, oq.investigation_time))
         if oqp.minimum_intensity != oq.minimum_intensity:
             raise ValueError(
                 'The parent calculation was using minimum_intensity=%s'
                 ' != %s' % (oqp.minimum_intensity, oq.minimum_intensity))
     elif pre_calculator:
         calc = calculators[pre_calculator](
             self.oqparam, self.datastore.calc_id)
         calc.run()
         self.param = calc.param
         self.sitecol = calc.sitecol
         self.assetcol = calc.assetcol
         self.riskmodel = calc.riskmodel
         self.rlzs_assoc = calc.rlzs_assoc
     else:
         self.read_inputs()
Example #25
    def read_risk_data(self):
        """
        Read the exposure (if any), the risk model (if any) and then the
        site collection, possibly extracted from the exposure.
        """
        oq = self.oqparam
        with self.monitor('reading site collection', autoflush=True):
            haz_sitecol = readinput.get_site_collection(oq)
        if haz_sitecol is not None:
            logging.info('There are %d hazard site(s)', len(haz_sitecol))
        oq_hazard = (self.datastore.parent['oqparam']
                     if self.datastore.parent else None)
        if 'exposure' in oq.inputs:
            self.read_exposure(haz_sitecol)
            self.load_riskmodel()  # must be called *after* read_exposure
            self.datastore['assetcol'] = self.assetcol
        elif 'assetcol' in self.datastore.parent:
            region = wkt.loads(self.oqparam.region_constraint)
            self.sitecol = haz_sitecol.within(region)
            assetcol = self.datastore.parent['assetcol']
            self.assetcol = assetcol.reduce(self.sitecol.sids)
            self.datastore['assetcol'] = self.assetcol
            logging.info('There are %d/%d assets in the region',
                         len(self.assetcol), len(assetcol))
            self.load_riskmodel()
        else:  # no exposure
            self.load_riskmodel()
            self.sitecol = haz_sitecol

        if oq_hazard:
            parent = self.datastore.parent
            if 'assetcol' in parent:
                check_time_event(oq, parent['assetcol'].occupancy_periods)
            if oq_hazard.time_event and oq_hazard.time_event != oq.time_event:
                raise ValueError(
                    'The risk configuration file has time_event=%s but the '
                    'hazard was computed with time_event=%s' %
                    (oq.time_event, oq_hazard.time_event))

        if self.oqparam.job_type == 'risk':
            taxonomies = set(taxo for taxo in self.assetcol.tagcol.taxonomy
                             if taxo != '?')

            # check that we are covering all the taxonomies in the exposure
            missing = taxonomies - set(self.riskmodel.taxonomies)
            if self.riskmodel and missing:
                raise RuntimeError('The exposure contains the taxonomies %s '
                                   'which are not in the risk model' % missing)

            # same check for the consequence models, if any
            consequence_models = riskmodels.get_risk_models(
                self.oqparam, 'consequence')
            for lt, cm in consequence_models.items():
                missing = taxonomies - set(cm)
                if missing:
                    raise ValueError('Missing consequenceFunctions for %s' %
                                     ' '.join(missing))
Example #26
    def test(self):
        fname = general.gettemp('''\
<?xml version="1.0" encoding="utf-8"?>
<nrml xmlns:gml="http://www.opengis.net/gml"
      xmlns="http://openquake.org/xmlns/nrml/0.4">

    <!-- Spectral Acceleration (SA) example -->
    <hazardCurves sourceModelTreePath="b1_b2_b4" gsimTreePath="b1_b2" investigationTime="50.0" IMT="SA" saPeriod="0.025" saDamping="5.0">
        <IMLs>5.0000e-03 7.0000e-03 1.3700e-02</IMLs>

        <hazardCurve>
            <gml:Point>
                <gml:pos>-122.5000 37.5000</gml:pos>
            </gml:Point>
            <poEs>9.8728e-01 9.8266e-01 9.4957e-01</poEs>
        </hazardCurve>
        <hazardCurve>
            <gml:Point>
                <gml:pos>-123.5000 37.5000</gml:pos>
            </gml:Point>
            <poEs>9.8727e-02 9.8265e-02 9.4956e-02</poEs>
        </hazardCurve>
    </hazardCurves>

    <!-- Basic example, using PGA as IMT -->
    <hazardCurves sourceModelTreePath="b1_b2_b3" gsimTreePath="b1_b7" investigationTime="50.0" IMT="PGA">
        <IMLs>5.0000e-03 7.0000e-03 1.3700e-02 3.3700e-02</IMLs>

        <hazardCurve>
            <gml:Point>
                <gml:pos>-122.5000 37.5000</gml:pos>
            </gml:Point>
            <poEs>9.8728e-01 9.8226e-01 9.4947e-01 9.2947e-01</poEs>
        </hazardCurve>
        <hazardCurve>
            <gml:Point>
                <gml:pos>-123.5000 37.5000</gml:pos>
            </gml:Point>
            <poEs>9.8728e-02 9.8216e-02 9.4945e-02 9.2947e-02</poEs>
        </hazardCurve>
    </hazardCurves>
</nrml>
''', suffix='.xml')
        oqparam = object.__new__(oqvalidation.OqParam)
        oqparam.inputs = dict(hazard_curves=fname)
        sitecol = readinput.get_site_collection(oqparam)
        self.assertEqual(len(sitecol), 2)
        self.assertEqual(sorted(oqparam.hazard_imtls.items()),
                         [('PGA', [0.005, 0.007, 0.0137, 0.0337]),
                          ('SA(0.025)', [0.005, 0.007, 0.0137])])
        hcurves = readinput.pmap.convert(oqparam.imtls, 2)
        assert_allclose(hcurves['PGA'], numpy.array(
            [[0.098728, 0.098216, 0.094945, 0.092947],
             [0.98728, 0.98226, 0.94947, 0.92947]]))
        assert_allclose(hcurves['SA(0.025)'], numpy.array(
            [[0.098727, 0.098265, 0.094956],
             [0.98728, 0.98266, 0.94957]]))
Example #27
    def read_exposure_sitecol(self):
        """
        Read the exposure (if any) and then the site collection, possibly
        extracted from the exposure.
        """
        inputs = self.oqparam.inputs
        if 'exposure' in inputs:
            logging.info('Reading the exposure')
            with self.monitor('reading exposure', autoflush=True):
                self.exposure = readinput.get_exposure(self.oqparam)
                self.sitecol, self.assets_by_site = (
                    readinput.get_sitecol_assets(self.oqparam, self.exposure))
                self.cost_types = self.exposure.cost_types
                self.taxonomies = numpy.array(
                    sorted(self.exposure.taxonomies), '|S100')
            num_assets = self.count_assets()
            if self.datastore.parent:
                haz_sitecol = self.datastore.parent['sitecol']
            elif 'gmfs' in inputs:
                haz_sitecol = readinput.get_site_collection(self.oqparam)
            # TODO: think about the case hazard_curves in inputs
            else:
                haz_sitecol = None
            if haz_sitecol is not None and haz_sitecol != self.sitecol:
                with self.monitor('assoc_assets_sites'):
                    self.sitecol, self.assets_by_site = \
                        self.assoc_assets_sites(haz_sitecol.complete)
                ok_assets = self.count_assets()
                num_sites = len(self.sitecol)
                logging.warn('Associated %d assets to %d sites, %d discarded',
                             ok_assets, num_sites, num_assets - ok_assets)
        elif (self.datastore.parent and 'exposure' in
              self.datastore.parent['oqparam'].inputs):
            logging.info('Re-using the already imported exposure')
        else:  # no exposure
            logging.info('Reading the site collection')
            with self.monitor('reading site collection', autoflush=True):
                self.sitecol = readinput.get_site_collection(self.oqparam)

        # save mesh and asset collection
        self.save_mesh()
        if hasattr(self, 'assets_by_site'):
            self.assetcol = riskinput.build_asset_collection(
                self.assets_by_site, self.oqparam.time_event)
Example #28
    def test_wrong_discretization(self):
        source = general.writetmp("""
[general]
calculation_mode = classical
region = 27.685048 85.280857, 27.736719 85.280857, 27.733376 85.355358, 27.675015 85.355358
region_grid_spacing = 5.0
maximum_distance=1
truncation_level=3
random_seed=5
reference_vs30_type = measured
reference_vs30_value = 600.0
reference_depth_to_2pt5km_per_sec = 5.0
reference_depth_to_1pt0km_per_sec = 100.0
intensity_measure_types = PGA
""")
        oqparam = readinput.get_oqparam(source)
        with self.assertRaises(ValueError) as ctx:
            readinput.get_site_collection(oqparam)
        self.assertIn('Could not discretize region', str(ctx.exception))
Example #29
 def pre_execute(self):
     """
     parse the logic tree and source model input
     """
     logging.warn('%s is still experimental', self.__class__.__name__)
     self.sitecol = readinput.get_site_collection(self.oqparam)
     self.csm = get_composite_source_model(self.oqparam)
     self.rlzs_assoc = self.csm.info.get_rlzs_assoc()
     self.rup_data = {}
     self.num_tiles = 1
Example #30
    def test_many_rlzs(self):
        oqparam = tests.get_oqparam('classical_job.ini')
        oqparam.number_of_logic_tree_samples = 0
        sitecol = readinput.get_site_collection(oqparam)
        csm = readinput.get_composite_source_model(oqparam, sitecol)
        self.assertEqual(len(csm), 9)  # the smlt example has 1 x 3 x 3 paths;
        # there are 2 distinct tectonic region types, so 18 trt_models
        rlzs_assoc = csm.get_rlzs_assoc()
        rlzs = rlzs_assoc.realizations
        self.assertEqual(len(rlzs), 18)  # the gsimlt has 1 x 2 paths
        self.assertEqual([1, 584, 1, 584, 1, 584, 1, 582, 1, 582,
                          1, 582, 1, 582, 1, 582, 1, 582],
                         map(len, csm.trt_models))

        # test the method csm_info.get_col_ids
        col_ids_first = rlzs_assoc.csm_info.get_col_ids(rlzs[0])
        self.assertEqual(col_ids_first, set([0, 1]))
        col_ids_last = rlzs_assoc.csm_info.get_col_ids(rlzs[-1])
        self.assertEqual(col_ids_last, set([16, 17]))

        # removing 9 trt_models out of 18
        for trt_model in csm.trt_models:
            if trt_model.trt == 'Active Shallow Crust':  # no ruptures
                trt_model.num_ruptures = 0
        assoc = csm.get_rlzs_assoc()

        expected_assoc = """\
<RlzsAssoc(18)
0,SadighEtAl1997: ['<0,b1_b3_b6,@_b3,w=0.04>']
1,SadighEtAl1997: ['<0,b1_b3_b6,@_b3,w=0.04>']
2,SadighEtAl1997: ['<1,b1_b3_b7,@_b3,w=0.12>']
3,SadighEtAl1997: ['<1,b1_b3_b7,@_b3,w=0.12>']
4,SadighEtAl1997: ['<2,b1_b3_b8,@_b3,w=0.04>']
5,SadighEtAl1997: ['<2,b1_b3_b8,@_b3,w=0.04>']
6,SadighEtAl1997: ['<3,b1_b4_b6,@_b3,w=0.12>']
7,SadighEtAl1997: ['<3,b1_b4_b6,@_b3,w=0.12>']
8,SadighEtAl1997: ['<4,b1_b4_b7,@_b3,w=0.36>']
9,SadighEtAl1997: ['<4,b1_b4_b7,@_b3,w=0.36>']
10,SadighEtAl1997: ['<5,b1_b4_b8,@_b3,w=0.12>']
11,SadighEtAl1997: ['<5,b1_b4_b8,@_b3,w=0.12>']
12,SadighEtAl1997: ['<6,b1_b5_b6,@_b3,w=0.04>']
13,SadighEtAl1997: ['<6,b1_b5_b6,@_b3,w=0.04>']
14,SadighEtAl1997: ['<7,b1_b5_b7,@_b3,w=0.12>']
15,SadighEtAl1997: ['<7,b1_b5_b7,@_b3,w=0.12>']
16,SadighEtAl1997: ['<8,b1_b5_b8,@_b3,w=0.04>']
17,SadighEtAl1997: ['<8,b1_b5_b8,@_b3,w=0.04>']>"""
        self.assertEqual(str(assoc), expected_assoc)
        self.assertEqual(len(assoc.realizations), 9)

        # removing all trt_models
        for trt_model in csm.trt_models:
            if trt_model.trt == 'Subduction Interface':  # no ruptures
                trt_model.num_ruptures = 0
        self.assertEqual(csm.get_rlzs_assoc().realizations, [])
Example #31
    def read_risk_data(self):
        """
        Read the exposure (if any), the risk model (if any) and then the
        site collection, possibly extracted from the exposure.
        """
        oq = self.oqparam
        with self.monitor('reading site collection', autoflush=True):
            haz_sitecol = readinput.get_site_collection(oq)
        if haz_sitecol is not None:
            logging.info('Read %d hazard site(s)', len(haz_sitecol))

        oq_hazard = (self.datastore.parent['oqparam']
                     if self.datastore.parent else None)
        if 'exposure' in oq.inputs:
            self.read_exposure()
            self.load_riskmodel()  # must be called *after* read_exposure
            num_assets = self.count_assets()
            if self.datastore.parent:
                haz_sitecol = self.datastore.parent['sitecol']
            if haz_sitecol is not None and haz_sitecol != self.sitecol:
                with self.monitor('assoc_assets_sites'):
                    self.sitecol, self.assets_by_site = \
                        self.assoc_assets_sites(haz_sitecol.complete)
                ok_assets = self.count_assets()
                num_sites = len(self.sitecol)
                logging.warn('Associated %d assets to %d sites, %d discarded',
                             ok_assets, num_sites, num_assets - ok_assets)
        elif oq_hazard and 'exposure' in oq_hazard.inputs:
            logging.info('Re-using the already imported exposure')
            self.load_riskmodel()
        else:  # no exposure
            self.load_riskmodel()
            self.sitecol = haz_sitecol

        if oq_hazard:
            parent = self.datastore.parent
            if 'assetcol' in parent:
                check_time_event(oq, parent['assetcol'].time_events)
            if oq_hazard.time_event and oq_hazard.time_event != oq.time_event:
                raise ValueError(
                    'The risk configuration file has time_event=%s but the '
                    'hazard was computed with time_event=%s' %
                    (oq.time_event, oq_hazard.time_event))

        # save mesh and asset collection
        self.save_mesh()
        if hasattr(self, 'assets_by_site'):
            self.assetcol = riskinput.AssetCollection(
                self.assets_by_site,
                self.cost_calculator,
                oq.time_event,
                time_events=sorted(self.exposure.time_events) or '')
        elif hasattr(self, 'assetcol'):
            self.assets_by_site = self.assetcol.assets_by_site()
Example #32
    def read_risk_data(self):
        """
        Read the exposure (if any), the risk model (if any) and then the
        site collection, possibly extracted from the exposure.
        """
        oq = self.oqparam
        with self.monitor("reading site collection", autoflush=True):
            haz_sitecol = readinput.get_site_collection(oq)
        if haz_sitecol is not None:
            logging.info("Read %d hazard site(s)", len(haz_sitecol))

        oq_hazard = self.datastore.parent["oqparam"] if self.datastore.parent else None
        if "exposure" in oq.inputs:
            self.read_exposure()
            self.load_riskmodel()  # must be called *after* read_exposure
            num_assets = self.count_assets()
            if self.datastore.parent:
                haz_sitecol = self.datastore.parent["sitecol"]
            if haz_sitecol is not None and haz_sitecol != self.sitecol:
                with self.monitor("assoc_assets_sites"):
                    self.sitecol, self.assets_by_site = self.assoc_assets_sites(haz_sitecol.complete)
                ok_assets = self.count_assets()
                num_sites = len(self.sitecol)
                logging.warn(
                    "Associated %d assets to %d sites, %d discarded", ok_assets, num_sites, num_assets - ok_assets
                )
        elif oq_hazard and "exposure" in oq_hazard.inputs:
            logging.info("Re-using the already imported exposure")
            self.load_riskmodel()
        else:  # no exposure
            self.load_riskmodel()
            self.sitecol = haz_sitecol

        if oq_hazard:
            parent = self.datastore.parent
            if "assetcol" in parent:
                check_time_event(oq, parent["assetcol"].time_events)
            if oq_hazard.time_event != oq.time_event:
                raise ValueError(
                    "The risk configuration file has time_event=%s but the "
                    "hazard was computed with time_event=%s" % (oq.time_event, oq_hazard.time_event)
                )

        # save mesh and asset collection
        self.save_mesh()
        if hasattr(self, "assets_by_site"):
            self.assetcol = riskinput.AssetCollection(
                self.assets_by_site,
                self.cost_calculator,
                oq.time_event,
                time_events=sorted(self.exposure.time_events) or "",
            )
        elif hasattr(self, "assetcol"):
            self.assets_by_site = self.assetcol.assets_by_site()
Example #33
    def test_wrong_discretization(self):
        source = general.writetmp("""
[general]
calculation_mode = event_based
region = 27.685048 85.280857, 27.736719 85.280857, 27.733376 85.355358, 27.675015 85.355358
region_grid_spacing = 5.0
maximum_distance=1
truncation_level=3
random_seed=5
reference_vs30_type = measured
reference_vs30_value = 600.0
reference_depth_to_2pt5km_per_sec = 5.0
reference_depth_to_1pt0km_per_sec = 100.0
intensity_measure_types = PGA
investigation_time = 50.
""")
        oqparam = readinput.get_oqparam(source)
        with self.assertRaises(ValueError) as ctx:
            readinput.get_site_collection(oqparam)
        self.assertIn('Could not discretize region', str(ctx.exception))
Example #34
    def initialize_site_collection(self):
        """
        Populate the hazard site table and create a sitecollection attribute.
        """
        logs.LOG.progress("initializing sites")
        points, site_ids = self.job.save_hazard_sites()
        if not site_ids:
            raise RuntimeError('No sites were imported!')

        logs.LOG.progress("initializing site collection")
        oqparam = self.job.get_oqparam()
        self.site_collection = get_site_collection(oqparam, points, site_ids)
Example #35
 def pre_execute(self):
     """
     parse the logic tree and source model input
     """
     logging.warn('%s is still experimental', self.__class__.__name__)
     self.sitecol = readinput.get_site_collection(self.oqparam)
     self.csm = get_composite_source_model(self.oqparam)
     self.gsims_by_grp = {
         grp.id: self.csm.info.get_gsims(grp.id)
         for sm in self.csm.source_models for grp in sm.src_groups
     }
     self.rup_data = {}
Example #36
    def initialize_site_collection(self):
        """
        Populate the hazard site table and create a sitecollection attribute.
        """
        logs.LOG.progress("initializing sites")
        points, site_ids = self.job.save_hazard_sites()
        if not site_ids:
            raise RuntimeError('No sites were imported!')

        logs.LOG.progress("initializing site collection")
        oqparam = self.job.get_oqparam()
        self.site_collection = get_site_collection(oqparam, points, site_ids)
Example #37
 def pre_execute(self):
     """
     Read the site collection and initialize GmfComputer, tags and seeds
     """
     if 'exposure' in self.oqparam.inputs:
         logging.info('Reading the exposure')
         exposure = readinput.get_exposure(self.oqparam)
         logging.info('Reading the site collection')
         self.sitecol, _assets = readinput.get_sitecol_assets(
             self.oqparam, exposure)
     else:
         self.sitecol = readinput.get_site_collection(self.oqparam)
     self._init_tags()
Example #38
    def test_many_rlzs(self):
        oqparam = tests.get_oqparam('classical_job.ini')
        oqparam.number_of_logic_tree_samples = 0
        sitecol = readinput.get_site_collection(oqparam)
        csm = readinput.get_composite_source_model(oqparam, sitecol)
        self.assertEqual(len(csm), 9)  # the smlt example has 1 x 3 x 3 paths;
        # there are 2 distinct tectonic region types, so 18 trt_models
        self.assertEqual(sum(1 for tm in csm.trt_models), 18)

        rlzs_assoc = csm.info.get_rlzs_assoc()
        rlzs = rlzs_assoc.realizations
        self.assertEqual(len(rlzs), 18)  # the gsimlt has 1 x 2 paths
        # counting the sources in each TRT model (unsplit)
        self.assertEqual(
            [1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2],
            list(map(len, csm.trt_models)))

        # test the method extract
        assoc = rlzs_assoc.extract([1, 5], csm.info)
        self.assertEqual(
            str(assoc), """\
<RlzsAssoc(size=4, rlzs=2)
0,SadighEtAl1997(): ['<1,b1_b3_b6~b2_b3,w=0.5>']
1,ChiouYoungs2008(): ['<1,b1_b3_b6~b2_b3,w=0.5>']
4,SadighEtAl1997(): ['<5,b1_b3_b8~b2_b3,w=0.5>']
5,ChiouYoungs2008(): ['<5,b1_b3_b8~b2_b3,w=0.5>']>""")

        # removing 9 trt_models out of 18
        def count_ruptures(trt_model):
            if trt_model.trt == 'Active Shallow Crust':  # no ruptures
                return 0
            else:
                return 1

        assoc = csm.info.get_rlzs_assoc(count_ruptures)
        expected_assoc = """\
<RlzsAssoc(size=9, rlzs=9)
0,SadighEtAl1997(): ['<0,b1_b3_b6~@_b3,w=0.04>']
2,SadighEtAl1997(): ['<1,b1_b3_b7~@_b3,w=0.12>']
4,SadighEtAl1997(): ['<2,b1_b3_b8~@_b3,w=0.04>']
6,SadighEtAl1997(): ['<3,b1_b4_b6~@_b3,w=0.12>']
8,SadighEtAl1997(): ['<4,b1_b4_b7~@_b3,w=0.36>']
10,SadighEtAl1997(): ['<5,b1_b4_b8~@_b3,w=0.12>']
12,SadighEtAl1997(): ['<6,b1_b5_b6~@_b3,w=0.04>']
14,SadighEtAl1997(): ['<7,b1_b5_b7~@_b3,w=0.12>']
16,SadighEtAl1997(): ['<8,b1_b5_b8~@_b3,w=0.04>']>"""
        self.assertEqual(str(assoc), expected_assoc)
        self.assertEqual(len(assoc.realizations), 9)

        # removing all trt_models
        self.assertEqual(csm.info.get_rlzs_assoc(lambda t: 0).realizations, [])
Example #39
def get_gmfs(calculator):
    """
    :param calculator: a scenario_risk/damage or gmf_ebrisk calculator
    :returns: a pair (eids, R) where R is the number of realizations
    """
    dstore = calculator.datastore
    oq = calculator.oqparam
    sitecol = calculator.sitecol
    if dstore.parent:
        haz_sitecol = dstore.parent['sitecol']  # S sites
    else:
        haz_sitecol = sitecol  # N sites
    N = len(haz_sitecol.complete)
    I = len(oq.imtls)
    if 'gmfs' in oq.inputs:  # from file
        logging.info('Reading gmfs from file')
        eids, gmfs = readinput.get_gmfs(oq)
        E = len(eids)
        if hasattr(oq, 'number_of_ground_motion_fields'):
            if oq.number_of_ground_motion_fields != E:
                raise RuntimeError(
                    'Expected %d ground motion fields, found %d' %
                    (oq.number_of_ground_motion_fields, E))
        else:  # set the number of GMFs from the file
            oq.number_of_ground_motion_fields = E
        # NB: get_gmfs redefine oq.sites in case of GMFs from XML or CSV
        haz_sitecol = readinput.get_site_collection(oq) or haz_sitecol
        calculator.assoc_assets(haz_sitecol)
        R, N, E, I = gmfs.shape
        idx = (slice(None)
               if haz_sitecol.indices is None else haz_sitecol.indices)
        save_gmf_data(dstore, haz_sitecol, gmfs[:, idx])

        # store the events, useful when read the GMFs from a file
        events = numpy.zeros(E, readinput.stored_event_dt)
        events['eid'] = eids
        dstore['events'] = events
        return eids, len(gmfs)

    elif calculator.precalc:  # from previous step
        num_assocs = dstore['csm_info'].get_num_rlzs()
        E = oq.number_of_ground_motion_fields
        eids = numpy.arange(E)
        gmfs = numpy.zeros((num_assocs, N, E, I))
        for g, gsim in enumerate(calculator.precalc.gsims):
            gmfs[g, sitecol.sids] = calculator.precalc.gmfa[gsim]
        return eids, len(gmfs)

    else:  # with --hc option
        return (calculator.datastore['events']['eid'],
                calculator.datastore['csm_info'].get_num_rlzs())
Example #40
 def read_exposure(self, haz_sitecol=None):
     """
     Read the exposure and update the attributes .exposure, .sitecol
     and .assetcol.
     """
     logging.info('Reading the exposure')
     with self.monitor('reading exposure', autoflush=True):
         self.exposure = readinput.get_exposure(self.oqparam)
         mesh, assets_by_site = (readinput.get_mesh_assets_by_site(
             self.oqparam, self.exposure))
     if haz_sitecol:
         tot_assets = sum(len(assets) for assets in assets_by_site)
         all_sids = haz_sitecol.complete.sids
         sids = set(haz_sitecol.sids)
         # associate the assets to the hazard sites
         asset_hazard_distance = self.oqparam.asset_hazard_distance
         siteobjects = geo.utils.GeographicObjects(
             Site(sid, lon, lat) for sid, lon, lat in zip(
                 haz_sitecol.sids, haz_sitecol.lons, haz_sitecol.lats))
         assets_by_sid = general.AccumDict(accum=[])
         for assets in assets_by_site:
             if len(assets):
                 lon, lat = assets[0].location
                 site, distance = siteobjects.get_closest(lon, lat)
                 if site.sid in sids and distance <= asset_hazard_distance:
                     # keep the assets, otherwise discard them
                     assets_by_sid += {site.sid: list(assets)}
         if not assets_by_sid:
             raise AssetSiteAssociationError(
                 'Could not associate any site to any assets within the '
                 'asset_hazard_distance of %s km' % asset_hazard_distance)
         mask = numpy.array([sid in assets_by_sid for sid in all_sids])
         assets_by_site = [assets_by_sid[sid] for sid in all_sids]
         num_assets = sum(len(assets) for assets in assets_by_site)
         logging.info('Associated %d/%d assets to the hazard sites',
                      num_assets, tot_assets)
         self.sitecol = haz_sitecol.complete.filter(mask)
     else:  # use the exposure sites as hazard sites
         self.sitecol = readinput.get_site_collection(self.oqparam, mesh)
     self.assetcol = asset.AssetCollection(
         self.exposure.asset_refs,
         assets_by_site,
         self.exposure.tagcol,
         self.exposure.cost_calculator,
         self.oqparam.time_event,
         occupancy_periods=hdf5.array_of_vstr(
             sorted(self.exposure.occupancy_periods)))
     logging.info('Considering %d assets on %d sites', len(self.assetcol),
                  len(self.sitecol))
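The association step above keeps an asset only when its closest hazard site lies within asset_hazard_distance. A brute-force sketch of the same idea, using planar distances for brevity (the real code uses geodetic distances in km via geo.utils.GeographicObjects and a spatial index instead of a linear scan):

import math

def assoc_assets(assets_by_location, sites, max_dist):
    # assets_by_location: list of (lon, lat, assets)
    # sites: list of (sid, lon, lat); returns {sid: assets} (sketch only)
    out = {}
    for lon, lat, assets in assets_by_location:
        sid, dist = min(((sid, math.hypot(lon - slon, lat - slat))
                         for sid, slon, slat in sites), key=lambda x: x[1])
        if dist <= max_dist:  # otherwise discard the assets
            out.setdefault(sid, []).extend(assets)
    return out

assert assoc_assets([(0.0, 0.0, ['a1'])], [(1, 0.1, 0.0)], 0.5) == {1: ['a1']}
assert assoc_assets([(0.0, 0.0, ['a1'])], [(1, 2.0, 0.0)], 0.5) == {}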
Example #41
    def test_many_rlzs(self):
        oqparam = tests.get_oqparam('classical_job.ini')
        oqparam.number_of_logic_tree_samples = 0
        sitecol = readinput.get_site_collection(oqparam)
        csm = readinput.get_composite_source_model(oqparam, sitecol)
        self.assertEqual(len(csm), 9)  # the smlt example has 1 x 3 x 3 paths;
        # there are 2 distinct tectonic region types, so 18 trt_models
        self.assertEqual(sum(1 for tm in csm.trt_models), 18)

        rlzs_assoc = csm.info.get_rlzs_assoc()
        rlzs = rlzs_assoc.realizations
        self.assertEqual(len(rlzs), 18)  # the gsimlt has 1 x 2 paths
        # counting the sources in each TRT model (unsplit)
        self.assertEqual(
            [1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2],
            list(map(len, csm.trt_models)))

        # test the method extract
        assoc = rlzs_assoc.extract([1, 5])
        self.assertEqual(str(assoc), """\
<RlzsAssoc(size=4, rlzs=2)
0,SadighEtAl1997: ['<1,b1_b3_b6,b2_b3,w=0.5>']
1,ChiouYoungs2008: ['<1,b1_b3_b6,b2_b3,w=0.5>']
4,SadighEtAl1997: ['<5,b1_b3_b8,b2_b3,w=0.5>']
5,ChiouYoungs2008: ['<5,b1_b3_b8,b2_b3,w=0.5>']>""")

        # removing 9 trt_models out of 18
        def count_ruptures(trt_model):
            if trt_model.trt == 'Active Shallow Crust':  # no ruptures
                return 0
            else:
                return 1
        assoc = csm.info.get_rlzs_assoc(count_ruptures)
        expected_assoc = """\
<RlzsAssoc(size=9, rlzs=9)
0,SadighEtAl1997: ['<0,b1_b3_b6,@_b3,w=0.04>']
2,SadighEtAl1997: ['<1,b1_b3_b7,@_b3,w=0.12>']
4,SadighEtAl1997: ['<2,b1_b3_b8,@_b3,w=0.04>']
6,SadighEtAl1997: ['<3,b1_b4_b6,@_b3,w=0.12>']
8,SadighEtAl1997: ['<4,b1_b4_b7,@_b3,w=0.36>']
10,SadighEtAl1997: ['<5,b1_b4_b8,@_b3,w=0.12>']
12,SadighEtAl1997: ['<6,b1_b5_b6,@_b3,w=0.04>']
14,SadighEtAl1997: ['<7,b1_b5_b7,@_b3,w=0.12>']
16,SadighEtAl1997: ['<8,b1_b5_b8,@_b3,w=0.04>']>"""
        self.assertEqual(str(assoc), expected_assoc)
        self.assertEqual(len(assoc.realizations), 9)

        # removing all trt_models
        self.assertEqual(csm.info.get_rlzs_assoc(lambda t: 0).realizations, [])
Example #42
def main(job_ini):
    logging.basicConfig(level=logging.INFO)
    oq = readinput.get_oqparam(job_ini)
    sitecol = readinput.get_site_collection(oq)
    src_filter = SourceFilter(sitecol, oq.maximum_distance)
    csm = readinput.get_composite_source_model(oq)
    for smr, rlzs in csm.full_lt.get_rlzs_by_smr().items():
        groups = csm.get_groups(smr)
        for rlz in rlzs:
            hcurves = calc_hazard_curves(groups, src_filter, oq.imtls,
                                         csm.full_lt.gsim_by_trt(rlz),
                                         oq.truncation_level,
                                         parallel.Starmap.apply)
            print('rlz=%s, hcurves=%s' % (rlz, hcurves))
    parallel.Starmap.shutdown()
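A minimal way to run the script above, assuming it is saved as a standalone module, is to pass the path of the job.ini as the first command-line argument:

import sys

if __name__ == '__main__':
    main(sys.argv[1])  # e.g. python run_classical.py /path/to/job.ini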
Example #43
    def read_exposure_sitecol(self):
        """
        Read the exposure (if any) and then the site collection, possibly
        extracted from the exposure.
        """
        logging.info('Reading the site collection')
        with self.monitor('reading site collection', autoflush=True):
            haz_sitecol = readinput.get_site_collection(self.oqparam)
        inputs = self.oqparam.inputs
        if 'exposure' in inputs:
            logging.info('Reading the exposure')
            with self.monitor('reading exposure', autoflush=True):
                self.exposure = readinput.get_exposure(self.oqparam)
                self.sitecol, self.assets_by_site = (
                    readinput.get_sitecol_assets(self.oqparam, self.exposure))
                if len(self.exposure.cost_types):
                    self.cost_types = self.exposure.cost_types
                self.taxonomies = numpy.array(
                    sorted(self.exposure.taxonomies), '|S100')
            num_assets = self.count_assets()
            if self.datastore.parent:
                haz_sitecol = self.datastore.parent['sitecol']
            if haz_sitecol is not None and haz_sitecol != self.sitecol:
                with self.monitor('assoc_assets_sites'):
                    self.sitecol, self.assets_by_site = \
                        self.assoc_assets_sites(haz_sitecol.complete)
                ok_assets = self.count_assets()
                num_sites = len(self.sitecol)
                logging.warn('Associated %d assets to %d sites, %d discarded',
                             ok_assets, num_sites, num_assets - ok_assets)
        elif (self.datastore.parent and 'exposure' in
              OqParam.from_(self.datastore.parent.attrs).inputs):
            logging.info('Re-using the already imported exposure')
        else:  # no exposure
            self.sitecol = haz_sitecol

        # save mesh and asset collection
        self.save_mesh()
        if hasattr(self, 'assets_by_site'):
            self.assetcol = riskinput.build_asset_collection(
                self.assets_by_site, self.oqparam.time_event)
            spec = set(self.oqparam.specific_assets)
            unknown = spec - set(self.assetcol['asset_ref'])
            if unknown:
                raise ValueError('The specific asset(s) %s are not in the '
                                 'exposure' % ', '.join(unknown))
Example #44
    def test_many_rlzs(self):
        oqparam = tests.get_oqparam('classical_job.ini')
        oqparam.number_of_logic_tree_samples = 0
        sitecol = readinput.get_site_collection(oqparam)
        csm = readinput.get_composite_source_model(oqparam, sitecol)
        self.assertEqual(len(csm), 9)  # the smlt example has 1 x 3 x 3 paths;
        # there are 2 distinct tectonic region types, so 18 trt_models
        rlzs_assoc = csm.get_rlzs_assoc()
        rlzs = rlzs_assoc.realizations
        self.assertEqual(len(rlzs), 18)  # the gsimlt has 1 x 2 paths
        self.assertEqual(
            [1, 14, 1, 14, 1, 14, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12],
            list(map(len, csm.trt_models)))

        # test the method get_col_ids
        col_ids_first = rlzs_assoc.get_col_ids(rlzs[0])
        self.assertEqual(col_ids_first, set([0, 1]))
        col_ids_last = rlzs_assoc.get_col_ids(rlzs[-1])
        self.assertEqual(col_ids_last, set([16, 17]))

        # removing 9 trt_models out of 18
        for trt_model in csm.trt_models:
            if trt_model.trt == 'Active Shallow Crust':  # no ruptures
                trt_model.num_ruptures = 0
        assoc = csm.get_rlzs_assoc()

        expected_assoc = """\
<RlzsAssoc(9)
0,SadighEtAl1997: ['<0,b1_b3_b6,@_b3,w=0.04>']
2,SadighEtAl1997: ['<1,b1_b3_b7,@_b3,w=0.12>']
4,SadighEtAl1997: ['<2,b1_b3_b8,@_b3,w=0.04>']
6,SadighEtAl1997: ['<3,b1_b4_b6,@_b3,w=0.12>']
8,SadighEtAl1997: ['<4,b1_b4_b7,@_b3,w=0.36>']
10,SadighEtAl1997: ['<5,b1_b4_b8,@_b3,w=0.12>']
12,SadighEtAl1997: ['<6,b1_b5_b6,@_b3,w=0.04>']
14,SadighEtAl1997: ['<7,b1_b5_b7,@_b3,w=0.12>']
16,SadighEtAl1997: ['<8,b1_b5_b8,@_b3,w=0.04>']>"""
        self.assertEqual(str(assoc), expected_assoc)
        self.assertEqual(len(assoc.realizations), 9)

        # removing all trt_models
        for trt_model in csm.trt_models:
            if trt_model.trt == 'Subduction Interface':  # no ruptures
                trt_model.num_ruptures = 0
        self.assertEqual(csm.get_rlzs_assoc().realizations, [])
Example #45
    def initialize_site_collection(self):
        """
        Populate the hazard site table and create a sitecollection attribute.
        """
        logs.LOG.progress("initializing sites")
        points, site_ids = self.job.save_hazard_sites()
        if not site_ids:
            raise RuntimeError('No sites were imported!')

        logs.LOG.progress("initializing site collection")
        oqparam = self.job.get_oqparam()
        if 'site_model' in oqparam.inputs:
            sm_params = SiteModelParams(
                self.job, get_site_model(oqparam))
        else:
            sm_params = None
        self.site_collection = get_site_collection(
            oqparam, points, site_ids, sm_params)
Example #46
    def test_oversampling(self):
        from openquake.qa_tests_data.classical import case_17
        oq = readinput.get_oqparam(
            os.path.join(os.path.dirname(case_17.__file__), 'job.ini'))
        sitecol = readinput.get_site_collection(oq)
        csm = readinput.get_composite_source_model(oq, sitecol)
        assoc = csm.info.get_rlzs_assoc()
        self.assertEqual(
            str(assoc),
            "<RlzsAssoc(size=2, rlzs=5)\n"
            "0,SadighEtAl1997: ['<0,b1,b1,w=0.2>']\n"
            "1,SadighEtAl1997: ['<1,b2,b1,w=0.2>', '<2,b2,b1,w=0.2>', '<3,b2,b1,w=0.2>', '<4,b2,b1,w=0.2>']>")

        # check CompositionInfo serialization
        array, attrs = assoc.csm_info.__toh5__()
        new = object.__new__(CompositionInfo)
        new.__fromh5__(array, attrs)
        self.assertEqual(repr(new), repr(assoc.csm_info))
Example #47
    def test_oversampling(self):
        from openquake.qa_tests_data.classical import case_17
        oq = readinput.get_oqparam(
            os.path.join(os.path.dirname(case_17.__file__), 'job.ini'))
        sitecol = readinput.get_site_collection(oq)
        csm = readinput.get_composite_source_model(oq, sitecol)
        assoc = csm.info.get_rlzs_assoc(lambda tm: 1)
        self.assertEqual(
            str(assoc), "<RlzsAssoc(size=2, rlzs=5)\n"
            "0,SadighEtAl1997(): [0 1 2]\n"
            "1,SadighEtAl1997(): [3 4]>")

        # check CompositionInfo serialization
        dic, attrs = csm.info.__toh5__()
        new = object.__new__(CompositionInfo)
        new.__fromh5__(dic, attrs)
        self.assertEqual(repr(new),
                         repr(csm.info).replace('0.20000000000000004', '0.2'))
Example #48
 def pre_execute(self):
     """
     Read the site collection and the sources.
     """
     if 'exposure' in self.oqparam.inputs:
         logging.info('Reading the exposure')
         exposure = readinput.get_exposure(self.oqparam)
         self.sitecol, _assets = readinput.get_sitecol_assets(
             self.oqparam, exposure)
     else:
         logging.info('Reading the site collection')
         self.sitecol = readinput.get_site_collection(self.oqparam)
     logging.info('Reading the effective source models')
     source_models = list(
         readinput.get_effective_source_models(self.oqparam, self.sitecol))
     self.all_sources = [src for src_model in source_models
                         for trt_model in src_model.trt_models
                         for src in trt_model]
     self.job_info = readinput.get_job_info(
         self.oqparam, source_models, self.sitecol)
Example #49
def _info(name, filtersources, weightsources):
    if name in base.calculators:
        print(textwrap.dedent(base.calculators[name].__doc__.strip()))
    elif name == 'gsims':
        for gs in gsim.get_available_gsims():
            print(gs)
    elif name.endswith('.xml'):
        print(nrml.read(name).to_str())
    elif name.endswith(('.ini', '.zip')):
        oqparam = readinput.get_oqparam(name)
        if 'exposure' in oqparam.inputs:
            expo = readinput.get_exposure(oqparam)
            sitecol, assets_by_site = readinput.get_sitecol_assets(
                oqparam, expo)
        elif filtersources or weightsources:
            sitecol, assets_by_site = readinput.get_site_collection(
                oqparam), []
        else:
            sitecol, assets_by_site = None, []
        if 'source_model_logic_tree' in oqparam.inputs:
            print('Reading the source model...')
            if weightsources:
                sp = source.SourceFilterWeighter
            elif filtersources:
                sp = source.SourceFilter
            else:
                sp = source.BaseSourceProcessor  # do nothing
            csm = readinput.get_composite_source_model(oqparam, sitecol, sp)
            assoc = csm.get_rlzs_assoc()
            _print_info(
                dict(rlzs_assoc=assoc, oqparam=oqparam,
                     composite_source_model=csm, sitecol=sitecol),
                filtersources, weightsources)
        if len(assets_by_site):
            assetcol = riskinput.build_asset_collection(assets_by_site)
            dic = groupby(assetcol, operator.attrgetter('taxonomy'))
            for taxo, num in dic.items():
                print('taxonomy #%d, %d assets' % (taxo, num))
            print('total assets = %d' % len(assetcol))
    else:
        print("No info for '%s'" % name)
Example #50
    def test_oversampling(self):
        from openquake.qa_tests_data.classical import case_17
        oq = readinput.get_oqparam(
            os.path.join(os.path.dirname(case_17.__file__), 'job.ini'))
        sitecol = readinput.get_site_collection(oq)
        with mock.patch('logging.warn') as warn:
            csm = readinput.get_composite_source_model(oq, sitecol)
        messages = [args[0][0] % args[0][1:] for args in warn.call_args_list]
        self.assertEqual(
            messages, ["The source path ('b2',) was sampled 4 times"])
        assoc = csm.get_rlzs_assoc()
        self.assertEqual(
            str(assoc),
            "<RlzsAssoc(2)\n"
            "0,SadighEtAl1997: ['<0,b1,b1,w=0.2>']\n"
            "1,SadighEtAl1997: ['<1,b2,b1,w=0.2,col=1>', '<2,b2,b1,w=0.2,col=2>', '<3,b2,b1,w=0.2,col=3>', '<4,b2,b1,w=0.2,col=4>']>")

        # test the method csm_info.get_col_ids
        col_ids_first = assoc.csm_info.get_col_ids(assoc.realizations[0])
        self.assertEqual(col_ids_first, set([0]))
        col_ids_last = assoc.csm_info.get_col_ids(assoc.realizations[-1])
        self.assertEqual(col_ids_last, set([4]))
Example #51
def _info(name, filtersources, weightsources):
    if name in base.calculators:
        print(textwrap.dedent(base.calculators[name].__doc__.strip()))
    elif name == "gsims":
        for gs in gsim.get_available_gsims():
            print(gs)
    elif name.endswith(".xml"):
        print(nrml.read(name).to_str())
    elif name.endswith((".ini", ".zip")):
        oqparam = readinput.get_oqparam(name)
        if "exposure" in oqparam.inputs:
            expo = readinput.get_exposure(oqparam)
            sitecol, assets_by_site = readinput.get_sitecol_assets(oqparam, expo)
        elif filtersources or weightsources:
            sitecol, assets_by_site = readinput.get_site_collection(oqparam), []
        else:
            sitecol, assets_by_site = None, []
        if "source_model_logic_tree" in oqparam.inputs:
            print("Reading the source model...")
            if weightsources:
                sp = source.SourceFilterWeighter
            elif filtersources:
                sp = source.SourceFilter
            else:
                sp = source.BaseSourceProcessor  # do nothing
            csm = readinput.get_composite_source_model(oqparam, sitecol, sp)
            assoc = csm.get_rlzs_assoc()
            dstore = datastore.Fake(
                vars(oqparam), rlzs_assoc=assoc,
                composite_source_model=csm, sitecol=sitecol)
            _print_info(dstore, filtersources, weightsources)
        if len(assets_by_site):
            assetcol = riskinput.build_asset_collection(assets_by_site)
            dic = groupby(assetcol, operator.attrgetter("taxonomy"))
            for taxo, num in dic.items():
                print("taxonomy #%d, %d assets" % (taxo, num))
            print("total assets = %d" % len(assetcol))
    else:
        print("No info for '%s'" % name)
Example #52
def _info(name, filtersources, weightsources):
    if name in base.calculators:
        print(textwrap.dedent(base.calculators[name].__doc__.strip()))
    elif name == 'gsims':
        for gs in gsim.get_available_gsims():
            print(gs)
    elif name.endswith('.xml'):
        print(nrml.read(name).to_str())
    elif name.endswith(('.ini', '.zip')):
        oqparam = readinput.get_oqparam(name)
        if 'exposure' in oqparam.inputs:
            expo = readinput.get_exposure(oqparam)
            sitecol, assets_by_site = readinput.get_sitecol_assets(
                oqparam, expo)
        elif filtersources or weightsources:
            sitecol, assets_by_site = readinput.get_site_collection(
                oqparam), []
        else:
            sitecol, assets_by_site = None, []
        if 'source_model_logic_tree' in oqparam.inputs:
            print('Reading the source model...')
            if weightsources:
                sp = source.SourceFilterWeighter
            elif filtersources:
                sp = source.SourceFilter
            else:
                sp = source.BaseSourceProcessor  # do nothing
            csm = readinput.get_composite_source_model(oqparam, sitecol, sp)
            assoc = csm.get_rlzs_assoc()
            _print_info(assoc, oqparam, csm, sitecol,
                        filtersources, weightsources)
        if len(assets_by_site):
            print('assets = %d' %
                  sum(len(assets) for assets in assets_by_site))
    else:
        print("No info for '%s'" % name)
Example #53
 def pre_execute(self):
     """
     parse the logic tree and source model input
     """
     self.sitecol = readinput.get_site_collection(self.oqparam)
     self.gsim_lt = readinput.get_gsim_lt(self.oqparam, [DEFAULT_TRT])
     self.smlt = readinput.get_source_model_lt(self.oqparam)
     parser = source.SourceModelParser(
         UCERFSourceConverter(self.oqparam.investigation_time,
                              self.oqparam.rupture_mesh_spacing))
     self.src_groups = parser.parse_src_groups(
         self.oqparam.inputs["source_model"])
     branches = sorted(self.smlt.branches.items())
     source_models = []
     num_gsim_paths = self.gsim_lt.get_num_paths()
     for ordinal, (name, branch) in enumerate(branches):
         sm = source.SourceModel(
             name, branch.weight, [name], self.src_groups,
             num_gsim_paths, ordinal, 1)
         source_models.append(sm)
     self.csm = source.CompositeSourceModel(
         self.gsim_lt, self.smlt, source_models, set_weight=False)
     self.rup_data = {}
     self.infos = []
Example #54
    def _read_risk_data(self):
        # read the exposure (if any), the risk model (if any) and then the
        # site collection, possibly extracted from the exposure.
        oq = self.oqparam
        self.load_riskmodel()  # must be called first

        if oq.hazard_calculation_id:
            with util.read(oq.hazard_calculation_id) as dstore:
                haz_sitecol = dstore['sitecol'].complete
        else:
            haz_sitecol = readinput.get_site_collection(oq)
            if hasattr(self, 'rup'):
                # for scenario we reduce the site collection to the sites
                # within the maximum distance from the rupture
                haz_sitecol, _dctx = self.cmaker.filter(
                    haz_sitecol, self.rup)
                haz_sitecol.make_complete()

            if 'site_model' in oq.inputs:
                self.datastore['site_model'] = readinput.get_site_model(oq)

        oq_hazard = (self.datastore.parent['oqparam']
                     if self.datastore.parent else None)
        if 'exposure' in oq.inputs:
            exposure = self.read_exposure(haz_sitecol)
            self.datastore['assetcol'] = self.assetcol
            self.datastore['assetcol/num_taxonomies'] = (
                self.assetcol.num_taxonomies_by_site())
            if hasattr(readinput.exposure, 'exposures'):
                self.datastore['assetcol/exposures'] = (
                    numpy.array(exposure.exposures, hdf5.vstr))
        elif 'assetcol' in self.datastore.parent:
            assetcol = self.datastore.parent['assetcol']
            if oq.region:
                region = wkt.loads(oq.region)
                self.sitecol = haz_sitecol.within(region)
            if oq.shakemap_id or 'shakemap' in oq.inputs:
                self.sitecol, self.assetcol = self.read_shakemap(
                    haz_sitecol, assetcol)
                self.datastore['assetcol'] = self.assetcol
                logging.info('Extracted %d/%d assets',
                             len(self.assetcol), len(assetcol))
                nsites = len(self.sitecol)
                if (oq.spatial_correlation != 'no' and
                        nsites > MAXSITES):  # hard-coded, heuristic
                    raise ValueError(CORRELATION_MATRIX_TOO_LARGE % nsites)
            elif hasattr(self, 'sitecol') and general.not_equal(
                    self.sitecol.sids, haz_sitecol.sids):
                self.assetcol = assetcol.reduce(self.sitecol)
                self.datastore['assetcol'] = self.assetcol
                self.datastore['assetcol/num_taxonomies'] = (
                    self.assetcol.num_taxonomies_by_site())
                logging.info('Extracted %d/%d assets',
                             len(self.assetcol), len(assetcol))
            else:
                self.assetcol = assetcol
        else:  # no exposure
            self.sitecol = haz_sitecol
            if self.sitecol:
                logging.info('Read %d hazard sites', len(self.sitecol))

        if oq_hazard:
            parent = self.datastore.parent
            if 'assetcol' in parent:
                check_time_event(oq, parent['assetcol'].occupancy_periods)
            elif oq.job_type == 'risk' and 'exposure' not in oq.inputs:
                raise ValueError('Missing exposure both in hazard and risk!')
            if oq_hazard.time_event and oq_hazard.time_event != oq.time_event:
                raise ValueError(
                    'The risk configuration file has time_event=%s but the '
                    'hazard was computed with time_event=%s' % (
                        oq.time_event, oq_hazard.time_event))

        if oq.job_type == 'risk':
            taxonomies = set(taxo for taxo in self.assetcol.tagcol.taxonomy
                             if taxo != '?')

            # check that we are covering all the taxonomies in the exposure
            missing = taxonomies - set(self.riskmodel.taxonomies)
            if self.riskmodel and missing:
                raise RuntimeError('The exposure contains the taxonomies %s '
                                   'which are not in the risk model' % missing)

            # same check for the consequence models, if any
            if any(key.endswith('_consequence') for key in oq.inputs):
                for taxonomy in taxonomies:
                    cfs = self.riskmodel[taxonomy].consequence_functions
                    if not cfs:
                        raise ValueError(
                            'Missing consequenceFunctions for %s' % taxonomy)

        if hasattr(self, 'sitecol') and self.sitecol:
            self.datastore['sitecol'] = self.sitecol.complete
        # used in the risk calculators
        self.param = dict(individual_curves=oq.individual_curves,
                          avg_losses=oq.avg_losses)

        # store the `exposed_value` if there is an exposure
        if 'exposed_value' not in set(self.datastore) and hasattr(
                self, 'assetcol'):
            self.datastore['exposed_value'] = self.assetcol.agg_value(
                *oq.aggregate_by)
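The taxonomy check above reduces to a set difference between the taxonomies found in the exposure and those known to the risk model. A standalone sketch of that rule (the helper name is illustrative):

def check_taxonomies(exposure_taxonomies, riskmodel_taxonomies):
    # ignore the '?' placeholder used for unknown taxonomies
    taxonomies = {taxo for taxo in exposure_taxonomies if taxo != '?'}
    missing = taxonomies - set(riskmodel_taxonomies)
    if missing:
        raise RuntimeError('The exposure contains the taxonomies %s '
                           'which are not in the risk model' % missing)

check_taxonomies({'RC', 'W', '?'}, {'RC', 'W', 'URM'})  # passes silently
# check_taxonomies({'RC', 'XX'}, {'RC'})  # would raise RuntimeError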
Example #55
 def pre_execute(self):
     """
     Check if there is a previous calculation ID.
     If yes, read the inputs by retrieving the previous calculation;
     if not, read the inputs directly.
     """
     oq = self.oqparam
     if 'gmfs' in oq.inputs or 'multi_peril' in oq.inputs:
         # read hazard from files
         assert not oq.hazard_calculation_id, (
             'You cannot use --hc together with gmfs_file')
         self.read_inputs()
         if 'gmfs' in oq.inputs:
             save_gmfs(self)
         else:
             self.save_multi_peril()
     elif 'hazard_curves' in oq.inputs:  # read hazard from file
         assert not oq.hazard_calculation_id, (
             'You cannot use --hc together with hazard_curves')
         haz_sitecol = readinput.get_site_collection(oq)
         # NB: horrible: get_site_collection calls get_pmap_from_nrml
         # that sets oq.investigation_time, so it must be called first
         self.load_riskmodel()  # must be after get_site_collection
         self.read_exposure(haz_sitecol)  # define .assets_by_site
         self.datastore['poes/grp-00'] = fix_ones(readinput.pmap)
         self.datastore['sitecol'] = self.sitecol
         self.datastore['assetcol'] = self.assetcol
         self.datastore['csm_info'] = fake = source.CompositionInfo.fake()
         self.rlzs_assoc = fake.get_rlzs_assoc()
     elif oq.hazard_calculation_id:
         parent = util.read(oq.hazard_calculation_id)
         self.check_precalc(parent['oqparam'].calculation_mode)
         self.datastore.parent = parent
         # copy missing parameters from the parent
         params = {name: value for name, value in
                   vars(parent['oqparam']).items()
                   if name not in vars(self.oqparam)}
         self.save_params(**params)
         self.read_inputs()
         oqp = parent['oqparam']
         if oqp.investigation_time != oq.investigation_time:
             raise ValueError(
                 'The parent calculation was using investigation_time=%s'
                 ' != %s' % (oqp.investigation_time, oq.investigation_time))
         if oqp.minimum_intensity != oq.minimum_intensity:
             raise ValueError(
                 'The parent calculation was using minimum_intensity=%s'
                 ' != %s' % (oqp.minimum_intensity, oq.minimum_intensity))
         missing_imts = set(oq.risk_imtls) - set(oqp.imtls)
         if missing_imts:
             raise ValueError(
                 'The parent calculation is missing the IMT(s) %s' %
                 ', '.join(missing_imts))
     elif self.__class__.precalc:
         calc = calculators[self.__class__.precalc](
             self.oqparam, self.datastore.calc_id)
         calc.run()
         self.param = calc.param
         self.sitecol = calc.sitecol
         self.assetcol = calc.assetcol
         self.riskmodel = calc.riskmodel
         if hasattr(calc, 'rlzs_assoc'):
             self.rlzs_assoc = calc.rlzs_assoc
         else:
             # this happens for instance for a scenario_damage without
             # rupture, gmfs, multi_peril
             raise InvalidFile(
                 '%(job_ini)s: missing gmfs_csv, multi_peril_csv' %
                 oq.inputs)
         if hasattr(calc, 'csm'):  # no scenario
             self.csm = calc.csm
     else:
         self.read_inputs()
     if self.riskmodel:
         self.save_riskmodel()
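When running with --hc, the code above copies every parameter defined on the parent calculation but missing on the child, without overriding the child's own settings. The same idiom in isolation (names are illustrative):

def inherit_missing(child_params, parent_params):
    # parameters set on the parent but not on the child are inherited
    return {name: value for name, value in parent_params.items()
            if name not in child_params}

assert inherit_missing({'imtls': ['PGA']},
                       {'imtls': ['SA(0.1)'], 'ses_seed': 42}) == {'ses_seed': 42}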