Example #1
    def test_lr_eq_0_cov_gt_0(self):
        # If a vulnerability function loss ratio is 0 and its corresponding CoV
        # is > 0, a ValueError should be raised
        vuln_content = writetmp(u"""\
<?xml version='1.0' encoding='utf-8'?>
<nrml xmlns="http://openquake.org/xmlns/nrml/0.4"
      xmlns:gml="http://www.opengis.net/gml">
    <vulnerabilityModel>
        <discreteVulnerabilitySet vulnerabilitySetID="PAGER"
                                  assetCategory="population"
                                  lossCategory="occupants">
            <IML IMT="PGV">0.005 0.007 0.0098 0.0137</IML>
            <discreteVulnerability vulnerabilityFunctionID="A"
                                   probabilisticDistribution="LN">
                <lossRatio>0.00 0.06 0.18 0.36</lossRatio>
                <coefficientsVariation>0.30 0.30 0.30 0.30
                </coefficientsVariation>
            </discreteVulnerability>
        </discreteVulnerabilitySet>
    </vulnerabilityModel>
</nrml>
""")
        with self.assertRaises(ValueError) as ar:
            nrml.parse(vuln_content)
        self.assertIn(
            'It is not valid to define a loss ratio = 0.0 with a '
            'corresponding coeff. of variation > 0.0', str(ar.exception))
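Note: the examples on this page call a writetmp helper that is not shown here; from the usage it presumably just writes the given text to a temporary file and returns the file path so that nrml.parse can read it back. A minimal sketch of such a helper, under that assumption (not the engine's actual implementation):

import tempfile

def writetmp(content, suffix=''):
    # Write `content` to a named temporary file and return its path.
    # delete=False keeps the file on disk so nrml.parse can reopen it later.
    with tempfile.NamedTemporaryFile(mode='w', suffix=suffix,
                                     delete=False) as fh:
        fh.write(content)
    return fh.name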
Example #2
def validate_nrml(request):
    """
    Leverage oq-risklib to check if a given XML text is a valid NRML

    :param request:
        a `django.http.HttpRequest` object containing the mandatory
        parameter 'xml_text': the text of the XML to be validated as NRML

    :returns: a JSON object, containing:
        * 'valid': a boolean indicating if the provided text is a valid NRML
        * 'error_msg': the error message, if any error was found
                       (None otherwise)
        * 'error_line': line of the given XML where the error was found
                        (None if no error was found or if it was not a
                        validation error)
    """
    xml_text = request.POST.get('xml_text')
    if not xml_text:
        return HttpResponseBadRequest(
            'Please provide the "xml_text" parameter')
    xml_file = writetmp(xml_text, suffix='.xml')
    try:
        nrml.parse(xml_file)
    except ExpatError as exc:
        return _make_response(error_msg=str(exc),
                              error_line=exc.lineno,
                              valid=False)
    except Exception as exc:
        # get the exception message
        exc_msg = exc.args[0]
        if isinstance(exc_msg, bytes):
            exc_msg = exc_msg.decode('utf-8')  # make it a unicode object
        elif isinstance(exc_msg, unicode):
            pass
        else:
            # if it is another kind of object, it is not obvious a priori how
            # to extract the error line from it
            return _make_response(error_msg=unicode(exc_msg),
                                  error_line=None,
                                  valid=False)
        # if the line is not mentioned, the whole message is taken
        error_msg = exc_msg.split(', line')[0]
        # check if the exc_msg contains a line number indication
        search_match = re.search(r'line \d+', exc_msg)
        if search_match:
            error_line = int(search_match.group(0).split()[1])
        else:
            error_line = None
        return _make_response(error_msg=error_msg,
                              error_line=error_line,
                              valid=False)
    else:
        return _make_response(error_msg=None, error_line=None, valid=True)
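The _make_response helper used above is not included in this listing; judging from the call sites it only has to pack 'valid', 'error_msg' and 'error_line' into a JSON HTTP response. A minimal sketch under that assumption (not the actual webui implementation):

import json
from django.http import HttpResponse

def _make_response(error_msg, error_line, valid):
    # Hypothetical sketch: serialize the validation outcome as JSON.
    payload = {'valid': valid,
               'error_msg': error_msg,
               'error_line': error_line}
    return HttpResponse(content=json.dumps(payload),
                        content_type='application/json')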
Example #3
    def test_invalid_vf_pmf(self):
        # the 'MI_TON' function declares 14 IMLs, but its last
        # <probabilities> row contains 17 values, so parsing must fail
        fname = writetmp('''\
<?xml version="1.0" encoding="UTF-8"?> 
<nrml xmlns="http://openquake.org/xmlns/nrml/0.5"> 
	<vulnerabilityModel id="Vulnerabilidad" assetCategory="MI_AIS" lossCategory="structural"> 
		<description>Vulnerabilidad Microcomponentes</description> 
		<vulnerabilityFunction id="MI_PTR" dist="PM">
			<imls imt="PGA">0.00 0.10 0.20 0.31 0.41 0.51 0.61 0.71 0.82 0.92 1.02 1.12 1.22 1.33 1.43 1.53 </imls>
			<probabilities lr="0">1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0</probabilities>
			<probabilities lr="0.000000000017">0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0</probabilities>
			<probabilities lr="0.000024">0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0</probabilities>
			<probabilities lr="0.005">0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0</probabilities>
			<probabilities lr="0.067">0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0</probabilities>
			<probabilities lr="0.25">0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0</probabilities>
			<probabilities lr="0.50">0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0</probabilities>
			<probabilities lr="0.72">0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0</probabilities>
			<probabilities lr="0.86">0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0</probabilities>
			<probabilities lr="0.93">0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0</probabilities>
			<probabilities lr="0.97">0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0</probabilities>
			<probabilities lr="0.99">0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0</probabilities>
			<probabilities lr="0.995">0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0</probabilities>
			<probabilities lr="0.998">0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0</probabilities>
			<probabilities lr="0.999">0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0</probabilities>
			<probabilities lr="1">0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1</probabilities>
		</vulnerabilityFunction>
		<vulnerabilityFunction id="MI_TON" dist="PM">
			<imls imt="PGA">0.00 0.10 0.20 0.31 0.41 0.51 0.61 0.71 0.82 0.92 1.02 1.12 1.22 1.33 </imls>
			<probabilities lr="0">1 0 0 0 0 0 0 0 0 0 0 0 0 0</probabilities>
			<probabilities lr="0.0000317">0 1 0 0 0 0 0 0 0 0 0 0 0 0</probabilities>
			<probabilities lr="0.025">0 0 1 0 0 0 0 0 0 0 0 0 0 0</probabilities>
			<probabilities lr="0.0221">0 0 0 1 0 0 0 0 0 0 0 0 0 0</probabilities>
			<probabilities lr="0.531">0 0 0 0 1 0 0 0 0 0 0 0 0 0</probabilities>
			<probabilities lr="0.768">0 0 0 0 0 1 0 0 0 0 0 0 0 0</probabilities>
			<probabilities lr="0.898">0 0 0 0 0 0 1 0 0 0 0 0 0 0</probabilities>
			<probabilities lr="0.958">0 0 0 0 0 0 0 1 0 0 0 0 0 0</probabilities>
			<probabilities lr="0.983">0 0 0 0 0 0 0 0 1 0 0 0 0 0</probabilities>
			<probabilities lr="0.993">0 0 0 0 0 0 0 0 0 1 0 0 0 0</probabilities>
			<probabilities lr="0.997">0 0 0 0 0 0 0 0 0 0 1 0 0 0</probabilities>
			<probabilities lr="0.998">0 0 0 0 0 0 0 0 0 0 0 1 0 0</probabilities>
			<probabilities lr="0.999">0 0 0 0 0 0 0 0 0 0 0 0 1 0</probabilities>
			<probabilities lr="1">0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1</probabilities>
		</vulnerabilityFunction>
	</vulnerabilityModel> 
</nrml>''')
        with self.assertRaises(ValueError) as ctx:
            nrml.parse(fname)
        self.assertIn('Wrong number of probabilities (expected 14, got 17)',
                      str(ctx.exception))
Example #4
    def test_ok(self):
        fname = os.path.join(EXAMPLES_DIR, 'consequence-model.xml')
        cmodel = nrml.parse(fname)
        self.assertEqual(
            repr(cmodel),
            "<ConsequenceModel structural ds1, ds2, ds3, ds4 tax1>")

        # test pickleability
        pickle.loads(pickle.dumps(cmodel))
Example #5
    def test(self):
        source_model = os.path.join(os.path.dirname(__file__), 'nankai.xml')
        groups = nrml.parse(source_model, SourceConverter(
            investigation_time=50., rupture_mesh_spacing=2.))
        site = Site(Point(135.68, 35.68), 800, True, z1pt0=100., z2pt5=1.)
        s_filter = SourceFilter(SiteCollection([site]), None)
        imtls = DictArray({'PGV': [20, 40, 80]})
        gsim_by_trt = {'Subduction Interface': SiMidorikawa1999SInter()}
        hcurves = calc_hazard_curves_ext(groups, s_filter, imtls, gsim_by_trt)
        npt.assert_almost_equal([0.91149953, 0.12548556, 0.00177583],
                                hcurves['PGV'][0])
Example #6
    def setUp(self):
        d = os.path.dirname(os.path.dirname(__file__))
        source_model = os.path.join(d, 'source_model/multi-point-source.xml')
        [self.sources] = nrml.parse(
            source_model,
            SourceConverter(investigation_time=50., rupture_mesh_spacing=2.))
        self.site = Site(Point(0.1, 0.1), 800, True, z1pt0=100., z2pt5=1.)
        self.imt = PGA()
        self.iml = 0.1
        self.truncation_level = 1
        self.trt = 'Stable Continental Crust'
        self.gsims = {self.trt: Campbell2003()}
Example #7
    def test(self):
        source_model = os.path.join(os.path.dirname(__file__), 'nankai.xml')
        groups = nrml.parse(source_model, SourceConverter(
            investigation_time=50., rupture_mesh_spacing=2.))
        site = Site(Point(135.68, 35.68), 800, True, z1pt0=100., z2pt5=1.)
        s_filter = SourceFilter(SiteCollection([site]), {})
        imtls = DictArray({'PGV': [20, 40, 80]})
        gsim_by_trt = {'Subduction Interface': SiMidorikawa1999SInter()}
        hcurves = calc_hazard_curves(groups, s_filter, imtls, gsim_by_trt)
        npt.assert_almost_equal(
            [1.11315443e-01, 3.92180097e-03, 3.02064427e-05],
            hcurves['PGV'][0])
Example #8
    def test_one_taxonomy_many_imts(self):
        # Should raise a ValueError if a vulnerabilityFunctionID is used for
        # multiple IMTs.
        # In this test input, we've defined two functions in separate sets
        # with the same ID and different IMTs.
        vuln_content = writetmp(u"""\
<?xml version='1.0' encoding='utf-8'?>
<nrml xmlns="http://openquake.org/xmlns/nrml/0.4"
      xmlns:gml="http://www.opengis.net/gml">
    <vulnerabilityModel>
        <discreteVulnerabilitySet vulnerabilitySetID="PAGER"
                                  assetCategory="population"
                                  lossCategory="occupants">
            <IML IMT="PGA">0.005 0.007 0.0098 0.0137</IML>
            <discreteVulnerability vulnerabilityFunctionID="A"
                                   probabilisticDistribution="LN">
                <lossRatio>0.01 0.06 0.18 0.36</lossRatio>
                <coefficientsVariation>0.30 0.30 0.30 0.30
         </coefficientsVariation>
            </discreteVulnerability>
        </discreteVulnerabilitySet>
        <discreteVulnerabilitySet vulnerabilitySetID="PAGER"
                                  assetCategory="population"
                                  lossCategory="occupants">
            <IML IMT="MMI">0.005 0.007 0.0098 0.0137</IML>
            <discreteVulnerability vulnerabilityFunctionID="A"
                                   probabilisticDistribution="LN">
                <lossRatio>0.01 0.06 0.18 0.36</lossRatio>
                <coefficientsVariation>0.30 0.30 0.30 0.30
                </coefficientsVariation>
            </discreteVulnerability>
        </discreteVulnerabilitySet>
    </vulnerabilityModel>
</nrml>
""")
        with self.assertRaises(InvalidFile) as ar:
            nrml.parse(vuln_content)
        self.assertIn('Duplicated vulnerabilityFunctionID: A',
                      str(ar.exception))
Example #9
    def test_missing_maxIML(self):
        # the IML node defines minIML but omits the required maxIML,
        # so the parser raises a KeyError
        vuln_content = writetmp(u"""\
<?xml version='1.0' encoding='utf-8'?>
<nrml xmlns:gml="http://www.opengis.net/gml"
      xmlns="http://openquake.org/xmlns/nrml/0.4">
     <fragilityModel format="continuous">
        <description>Fragility for test</description>
        <limitStates>LS1 LS2</limitStates>
        <ffs type="lognormal">
            <taxonomy>RC</taxonomy>
            <IML IMT="PGA" minIML="9.9" imlUnit="g"/>
            <ffc ls="LS1">
                <params mean="0.2" stddev="0.05" />
            </ffc>
            <ffc ls="LS2">
                <params mean="0.35" stddev="0.10" />
            </ffc>
        </ffs>
    </fragilityModel>
</nrml>""")
        with self.assertRaises(KeyError) as ar:
            nrml.parse(vuln_content)
        self.assertIn("node IML: 'maxIML', line 9", str(ar.exception))
Example #10
def get_composite_source_model(oq):
    """
    :param oq: :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :returns: a `class:`openquake.commonlib.source.CompositeSourceModel`
    """
    [src_group] = nrml.parse(
        oq.inputs["source_model"],
        SourceConverter(oq.investigation_time, oq.rupture_mesh_spacing))
    source_models = []
    gsim_lt = readinput.get_gsim_lt(oq, [DEFAULT_TRT])
    smlt = readinput.get_source_model_lt(oq)
    for sm in smlt.gen_source_models(gsim_lt):
        sg = copy.copy(src_group)
        sg.id = sm.ordinal
        sm.src_groups = [sg]
        sg.sources = [sg[0].new(sm.ordinal, sm.name)]
        source_models.append(sm)
    return source.CompositeSourceModel(gsim_lt, smlt, source_models)
Example #11
    def test(self):
        d = os.path.dirname(os.path.dirname(__file__))
        source_model = os.path.join(d, 'source_model/multi-point-source.xml')
        groups = nrml.parse(source_model, SourceConverter(
            investigation_time=50., rupture_mesh_spacing=2.))
        site = Site(Point(0.1, 0.1), 800, True, z1pt0=100., z2pt5=1.)
        sitecol = SiteCollection([site])
        imtls = DictArray({'PGA': [0.01, 0.02, 0.04, 0.08, 0.16]})
        gsim_by_trt = {'Stable Continental Crust': Campbell2003()}
        hcurves = calc_hazard_curves(groups, sitecol, imtls, gsim_by_trt)
        expected = [0.99999778, 0.9084039, 0.148975348,
                    0.0036909656, 2.76326e-05]
        npt.assert_almost_equal(hcurves['PGA'][0], expected)

        # splitting in point sources
        [[mps1, mps2]] = groups
        psources = list(mps1) + list(mps2)
        hcurves = calc_hazard_curves(psources, sitecol, imtls, gsim_by_trt)
        npt.assert_almost_equal(hcurves['PGA'][0], expected)
Example #13
    def test_different_levels_ok(self):
        # the same IMT can appear with different levels in different
        # vulnerability functions
        vuln_content = writetmp(u"""\
<?xml version='1.0' encoding='utf-8'?>
<nrml xmlns="http://openquake.org/xmlns/nrml/0.4"
      xmlns:gml="http://www.opengis.net/gml">
    <vulnerabilityModel>
        <discreteVulnerabilitySet vulnerabilitySetID="PAGER"
                                  assetCategory="population"
                                  lossCategory="occupants">
            <IML IMT="PGA">0.005 0.007 0.0098 0.0137</IML>
            <discreteVulnerability vulnerabilityFunctionID="RC/A"
                                   probabilisticDistribution="LN">
                <lossRatio>0.01 0.06 0.18 0.36</lossRatio>
                <coefficientsVariation>0.30 0.30 0.30 0.30
         </coefficientsVariation>
            </discreteVulnerability>
        </discreteVulnerabilitySet>
        <discreteVulnerabilitySet vulnerabilitySetID="PAGER"
                                  assetCategory="population"
                                  lossCategory="occupants">
            <IML IMT="PGA">0.004 0.008 0.037</IML>
            <discreteVulnerability vulnerabilityFunctionID="RC/B"
                                   probabilisticDistribution="LN">
                <lossRatio>0.01 0.06 0.18</lossRatio>
                <coefficientsVariation>0.30 0.30 0.30
         </coefficientsVariation>
            </discreteVulnerability>
        </discreteVulnerabilitySet>
    </vulnerabilityModel>
</nrml>
""")
        vfs = nrml.parse(vuln_content)
        assert_almost_equal(vfs['PGA', 'RC/A'].imls,
                            numpy.array([0.005, 0.007, 0.0098, 0.0137]))
        assert_almost_equal(vfs['PGA', 'RC/B'].imls,
                            numpy.array([0.004, 0.008, 0.037]))
Example #14
    def test_case_17(self):  # oversampling and save_ruptures
        expected = [
            'hazard_curve-mean.csv',
            'hazard_curve-rlz-001.csv',
            'hazard_curve-rlz-002.csv',
            'hazard_curve-rlz-003.csv',
            'hazard_curve-rlz-004.csv',
        ]
        # test --hc functionality, i.e. that the ruptures are read correctly
        out = self.run_calc(case_17.__file__, 'job.ini,job.ini', exports='csv')
        fnames = out['hcurves', 'csv']
        for exp, got in zip(expected, fnames):
            self.assertEqualFiles('expected/%s' % exp, got)

        # check that a single rupture file is exported even if there are
        # several collections
        [fname] = export(('ruptures', 'xml'), self.calc.datastore)
        self.assertEqualFiles('expected/ses.xml', fname)

        # check that the exported file is parseable
        rupcoll = nrml.parse(fname, RuptureConverter(1))
        self.assertEqual(list(rupcoll), [1])  # one group
        self.assertEqual(len(rupcoll[1]), 3)  # three EBRuptures
Example #15
    def test_wrong_files(self):
        # missing lossCategory
        with self.assertRaises(KeyError) as ctx:
            nrml.parse(self.wrong_csq_model_1)
        self.assertIn("node consequenceModel: 'lossCategory', line 3",
                      str(ctx.exception))

        # missing loss state
        with self.assertRaises(ValueError) as ctx:
            nrml.parse(self.wrong_csq_model_2)
        self.assertIn(
            "node consequenceFunction: Expected 4 limit"
            " states, got 3, line 9", str(ctx.exception))

        # inverted loss states
        with self.assertRaises(ValueError) as ctx:
            nrml.parse(self.wrong_csq_model_3)
        self.assertIn("node params: Expected 'ds3', got 'ds4', line 12",
                      str(ctx.exception))
Example #16
def get_risk_models(oqparam, kind=None):
    """
    :param oqparam:
        an OqParam instance
    :param kind:
        vulnerability|vulnerability_retrofitted|fragility|consequence;
        if None it is extracted from the oqparam.file_type attribute
    :returns:
        a dictionary taxonomy -> loss_type -> function
    """
    kind = kind or oqparam.file_type
    rmodels = {}
    for key in sorted(oqparam.inputs):
        mo = re.match('(occupants|%s)_%s$' % (COST_TYPE_REGEX, kind), key)
        if mo:
            key_type = mo.group(1)  # the cost_type in the key
            # can be occupants, structural, nonstructural, ...
            rmodel = nrml.parse(oqparam.inputs[key])
            rmodels[key_type] = rmodel
            if rmodel.lossCategory is None:  # NRML 0.4
                continue
            cost_type = str(rmodel.lossCategory)
            rmodel_kind = rmodel.__class__.__name__
            kind_ = kind.replace('_retrofitted', '')  # strip retrofitted
            if not rmodel_kind.lower().startswith(kind_):
                raise ValueError('Error in the file "%s_file=%s": is '
                                 'of kind %s, expected %s' %
                                 (key, oqparam.inputs[key], rmodel_kind,
                                  kind.capitalize() + 'Model'))
            if cost_type != key_type:
                raise ValueError(
                    'Error in the file "%s_file=%s": lossCategory is of type '
                    '"%s", expected "%s"' %
                    (key, oqparam.inputs[key], rmodel.lossCategory, key_type))
    rdict = collections.defaultdict(dict)
    if kind == 'fragility':
        limit_states = []
        for loss_type, fm in sorted(rmodels.items()):
            # build a copy of the FragilityModel with different IM levels
            newfm = fm.build(oqparam.continuous_fragility_discretization,
                             oqparam.steps_per_interval)
            for (imt, taxo), ffl in newfm.items():
                if not limit_states:
                    limit_states.extend(fm.limitStates)
                # we are rejecting the case of loss types with different
                # limit states; this may change in the future
                assert limit_states == fm.limitStates, (limit_states,
                                                        fm.limitStates)
                rdict[taxo][loss_type] = ffl
                # TODO: see if it is possible to remove the attribute
                # below, used in classical_damage
                ffl.steps_per_interval = oqparam.steps_per_interval
        oqparam.limit_states = [str(ls) for ls in limit_states]
    elif kind == 'consequence':
        rdict = rmodels
    else:  # vulnerability
        cl_risk = oqparam.calculation_mode in ('classical', 'classical_risk')
        # only for classical_risk reduce the loss_ratios
        # to make sure they are strictly increasing
        for loss_type, rm in rmodels.items():
            for (imt, taxo), rf in rm.items():
                rdict[taxo][loss_type] = (rf.strictly_increasing()
                                          if cl_risk else rf)
    return rdict
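As a usage sketch, the dictionary returned by get_risk_models maps taxonomy to loss_type to risk function, so a caller can walk it like this (illustrative only; print_risk_functions is not part of the engine and oqparam is assumed to be an already configured OqParam instance):

def print_risk_functions(oqparam):
    # Walk the taxonomy -> loss_type -> function mapping built above.
    risk_models = get_risk_models(oqparam)
    for taxonomy, by_loss_type in sorted(risk_models.items()):
        for loss_type, rf in sorted(by_loss_type.items()):
            print(taxonomy, loss_type, rf)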
Example #17
    def test_invalid_srcs_weights_length(self):
        fname = writetmp('''\
<?xml version="1.0" encoding="utf-8"?>
<nrml
xmlns="http://openquake.org/xmlns/nrml/0.5"
xmlns:gml="http://www.opengis.net/gml"
>
    <sourceModel
    name="Classical Hazard QA Test, Case 1 source model"
    >
        <sourceGroup
        name="group 1"
        tectonicRegion="active shallow crust"
        srcs_weights="0.5 0.5"
        >
            <pointSource
            id="1"
            name="point source"
            >           
                <pointGeometry>
                    <gml:Point>
                        <gml:pos>
                            0.0000000E+00 0.0000000E+00
                        </gml:pos>
                    </gml:Point>
                    <upperSeismoDepth>
                        0.0000000E+00
                    </upperSeismoDepth>
                    <lowerSeismoDepth>
                        1.0000000E+01
                    </lowerSeismoDepth>
                </pointGeometry>
                <magScaleRel>
                    PeerMSR
                </magScaleRel>
                <ruptAspectRatio>
                    1.0000000E+00
                </ruptAspectRatio>
                <incrementalMFD
                binWidth="1.0000000E+00"
                minMag="4.0000000E+00"
                >
                    <occurRates>
                        1.0000000E+00
                    </occurRates>
                </incrementalMFD>
                <nodalPlaneDist>
                    <nodalPlane dip="90" probability="1" rake="0" strike="0"/>
                </nodalPlaneDist>
                <hypoDepthDist>
                    <hypoDepth depth="4" probability="1"/>
                </hypoDepthDist>
            </pointSource>
        </sourceGroup>
    </sourceModel>
</nrml>
''')
        converter = SourceConverter(50., 1., 10, 0.1, 10.)
        with self.assertRaises(ValueError) as ctx:
            parse(fname, converter)
        self.assertEqual('There are 2 srcs_weights but 1 source(s)',
                         str(ctx.exception))