def test_lr_eq_0_cov_gt_0(self):
    # A loss ratio of exactly 0 paired with a coefficient of variation
    # greater than 0 is invalid: parsing the model must raise ValueError.
    model_path = writetmp(u"""\
<?xml version='1.0' encoding='utf-8'?>
<nrml xmlns="http://openquake.org/xmlns/nrml/0.4"
      xmlns:gml="http://www.opengis.net/gml">
    <vulnerabilityModel>
        <discreteVulnerabilitySet vulnerabilitySetID="PAGER"
                                  assetCategory="population"
                                  lossCategory="occupants">
            <IML IMT="PGV">0.005 0.007 0.0098 0.0137</IML>
            <discreteVulnerability vulnerabilityFunctionID="A"
                                   probabilisticDistribution="LN">
                <lossRatio>0.00 0.06 0.18 0.36</lossRatio>
                <coefficientsVariation>0.30 0.30 0.30 0.30
                </coefficientsVariation>
            </discreteVulnerability>
        </discreteVulnerabilitySet>
    </vulnerabilityModel>
</nrml>
""")
    expected = ('It is not valid to define a loss ratio = 0.0 with a '
                'corresponding coeff. of variation > 0.0')
    with self.assertRaises(ValueError) as ctx:
        nrml.parse(model_path)
    self.assertIn(expected, str(ctx.exception))
def test_lr_eq_0_cov_gt_0(self):
    # If a vulnerability function loss ratio is 0 and its corresponding
    # CoV is > 0, a ValueError should be raised.
    vuln_content = writetmp(u"""\
<?xml version='1.0' encoding='utf-8'?>
<nrml xmlns="http://openquake.org/xmlns/nrml/0.4"
      xmlns:gml="http://www.opengis.net/gml">
    <vulnerabilityModel>
        <discreteVulnerabilitySet vulnerabilitySetID="PAGER"
                                  assetCategory="population"
                                  lossCategory="fatalities">
            <IML IMT="PGV">0.005 0.007 0.0098 0.0137</IML>
            <discreteVulnerability vulnerabilityFunctionID="A"
                                   probabilisticDistribution="LN">
                <lossRatio>0.00 0.06 0.18 0.36</lossRatio>
                <coefficientsVariation>0.30 0.30 0.30 0.30
                </coefficientsVariation>
            </discreteVulnerability>
        </discreteVulnerabilitySet>
    </vulnerabilityModel>
</nrml>
""")
    with self.assertRaises(ValueError) as ar:
        nrml.parse(vuln_content)
    # FIX: use str(ar.exception) instead of ar.exception.message —
    # the .message attribute does not exist on Python 3 exceptions
    self.assertIn('It is not valid to define a loss ratio = 0.0 with a '
                  'corresponding coeff. of variation > 0.0',
                  str(ar.exception))
def validate_nrml(request):
    """
    Leverage oq-risklib to check if a given XML text is a valid NRML

    :param request:
        a `django.http.HttpRequest` object containing the mandatory
        parameter 'xml_text': the text of the XML to be validated as NRML
    :returns: a JSON object, containing:
        * 'valid': a boolean indicating if the provided text is a valid NRML
        * 'error_msg': the error message, if any error was found
          (None otherwise)
        * 'error_line': line of the given XML where the error was found
          (None if no error was found or if it was not a validation error)
    """
    xml_text = request.POST.get('xml_text')
    if not xml_text:
        # missing mandatory parameter -> HTTP 400
        return HttpResponseBadRequest(
            'Please provide the "xml_text" parameter')
    # the parser works on files, so dump the text to a temporary .xml file
    xml_file = writetmp(xml_text, suffix='.xml')
    try:
        nrml.parse(xml_file)
    except etree.ParseError as exc:
        # NOTE(review): exc.message is assumed to be a wrapped syntax-error
        # object carrying .message and .lineno; also, the .message attribute
        # is Python-2-only — confirm against the etree wrapper in use
        return _make_response(error_msg=exc.message.message,
                              error_line=exc.message.lineno,
                              valid=False)
    except Exception as exc:
        # get the exception message
        exc_msg = exc.args[0]
        if isinstance(exc_msg, bytes):
            exc_msg = exc_msg.decode('utf-8')  # make it a unicode object
        elif isinstance(exc_msg, unicode):
            # NOTE(review): `unicode` makes this function Python-2-only
            pass
        else:
            # if it is another kind of object, it is not obvious a priori how
            # to extract the error line from it
            return _make_response(error_msg=unicode(exc_msg),
                                  error_line=None,
                                  valid=False)
        # if the line is not mentioned, the whole message is taken
        error_msg = exc_msg.split(', line')[0]
        # check if the exc_msg contains a line number indication
        search_match = re.search(r'line \d+', exc_msg)
        if search_match:
            error_line = int(search_match.group(0).split()[1])
        else:
            error_line = None
        return _make_response(error_msg=error_msg, error_line=error_line,
                              valid=False)
    else:
        # no exception: the text is valid NRML
        return _make_response(error_msg=None, error_line=None, valid=True)
def validate_nrml(request):
    """
    Leverage oq-risklib to check if a given XML text is a valid NRML

    :param request:
        a `django.http.HttpRequest` object containing the mandatory
        parameter 'xml_text': the text of the XML to be validated as NRML
    :returns: a JSON object, containing:
        * 'valid': a boolean indicating if the provided text is a valid NRML
        * 'error_msg': the error message, if any error was found
          (None otherwise)
        * 'error_line': line of the given XML where the error was found
          (None if no error was found or if it was not a validation error)
    """
    xml_text = request.POST.get('xml_text')
    if not xml_text:
        # missing mandatory parameter -> HTTP 400
        return HttpResponseBadRequest(
            'Please provide the "xml_text" parameter')
    # the parser works on files, so dump the text to a temporary .xml file
    xml_file = writetmp(xml_text, suffix='.xml')
    try:
        nrml.parse(xml_file)
    except etree.ParseError as exc:
        # NOTE(review): exc.message is assumed to be a wrapped syntax-error
        # object carrying .message and .lineno; also, the .message attribute
        # is Python-2-only — confirm against the etree wrapper in use
        return _make_response(error_msg=exc.message.message,
                              error_line=exc.message.lineno,
                              valid=False)
    except Exception as exc:
        # get the exception message
        exc_msg = exc.args[0]
        if isinstance(exc_msg, bytes):
            exc_msg = exc_msg.decode('utf-8')  # make it a unicode object
        elif isinstance(exc_msg, unicode):
            # NOTE(review): `unicode` makes this function Python-2-only
            pass
        else:
            # if it is another kind of object, it is not obvious a priori how
            # to extract the error line from it
            return _make_response(
                error_msg=unicode(exc_msg), error_line=None, valid=False)
        # if the line is not mentioned, the whole message is taken
        error_msg = exc_msg.split(', line')[0]
        # check if the exc_msg contains a line number indication
        search_match = re.search(r'line \d+', exc_msg)
        if search_match:
            error_line = int(search_match.group(0).split()[1])
        else:
            error_line = None
        return _make_response(
            error_msg=error_msg, error_line=error_line, valid=False)
    else:
        # no exception: the text is valid NRML
        return _make_response(error_msg=None, error_line=None, valid=True)
def get_risk_models(oqparam, kind=None):
    """
    Parse the risk-model files referenced by the job configuration.

    :param oqparam: an OqParam instance
    :param kind:
        vulnerability|vulnerability_retrofitted|fragility|consequence;
        if None it is extracted from the oqparam.file_type attribute
    :returns: a dictionary imt_taxo -> loss_type -> function
    """
    kind = kind or oqparam.file_type
    rmodels = {}
    for key in oqparam.inputs:
        # match keys like 'structural_vulnerability', 'occupants_fragility'
        mo = re.match('(occupants|%s)_%s$' % (COST_TYPE_REGEX, kind), key)
        if mo:
            key_type = mo.group(1)  # the cost_type in the key
            # can be occupants, structural, nonstructural, ...
            rmodel = nrml.parse(oqparam.inputs[key])
            rmodels[cost_type_to_loss_type(key_type)] = rmodel
            if rmodel.lossCategory is None:  # NRML 0.4
                # old format has no lossCategory to validate against
                continue
            cost_type = str(rmodel.lossCategory)
            rmodel_kind = rmodel.__class__.__name__
            kind_ = kind.replace('_retrofitted', '')  # strip retrofitted
            # the parsed model class must match the requested kind
            if not rmodel_kind.lower().startswith(kind_):
                raise ValueError('Error in the file "%s_file=%s": is '
                                 'of kind %s, expected %s' % (
                                     key, oqparam.inputs[key], rmodel_kind,
                                     kind.capitalize() + 'Model'))
            # the lossCategory inside the file must match the key prefix
            if cost_type != key_type:
                raise ValueError(
                    'Error in the file "%s_file=%s": lossCategory is of type '
                    '"%s", expected "%s"' % (key, oqparam.inputs[key],
                                             rmodel.lossCategory, key_type))
    rdict = collections.defaultdict(dict)
    if kind == 'fragility':
        limit_states = []
        for loss_type, fm in sorted(rmodels.items()):
            # build a copy of the FragilityModel with different IM levels
            newfm = fm.build(oqparam.continuous_fragility_discretization,
                             oqparam.steps_per_interval)
            for imt_taxo, ffl in newfm.items():
                if not limit_states:
                    limit_states.extend(fm.limitStates)
                # we are rejecting the case of loss types with different
                # limit states; this may change in the future
                assert limit_states == fm.limitStates, (
                    limit_states, fm.limitStates)
                rdict[imt_taxo][loss_type] = ffl
                # TODO: see if it is possible to remove the attribute
                # below, used in classical_damage
                ffl.steps_per_interval = oqparam.steps_per_interval
        oqparam.limit_states = limit_states
    elif kind == 'consequence':
        # consequence models are already keyed as needed
        rdict = rmodels
    else:  # vulnerability
        for loss_type, rm in rmodels.items():
            for imt_taxo, rf in rm.items():
                rdict[imt_taxo][loss_type] = rf
    return rdict
def test_ok(self):
    # Parse the example consequence model, check its repr and make
    # sure the parsed object survives a pickle round trip.
    path = os.path.join(EXAMPLES_DIR, 'consequence-model.xml')
    model = nrml.parse(path)
    expected = "<ConsequenceModel structural ds1, ds2, ds3, ds4 tax1>"
    self.assertEqual(repr(model), expected)
    # test pickleability
    pickle.loads(pickle.dumps(model))
def test_ok(self):
    # Parse the example consequence model, check its repr and make
    # sure the parsed object survives a pickle round trip.
    path = os.path.join(EXAMPLES_DIR, 'consequence-model.xml')
    model = nrml.parse(path)
    expected = ("<ConsequenceModel structural "
                "['ds1', 'ds2', 'ds3', 'ds4'] tax1>")
    self.assertEqual(repr(model), expected)
    # test pickleability
    pickle.loads(pickle.dumps(model))
def parse_groups(self, fname):
    """
    Parse all of the source groups in the given file and return them
    ordered by number of sources. The ruptures are not counted, so
    this is relatively fast.

    :param fname: the full pathname of the source model file
    """
    converter = self.converter
    return nrml.parse(fname, converter)
def test_one_taxonomy_many_imts(self):
    # Should raise a ValueError if a vulnerabilityFunctionID is used for
    # multiple IMTs.
    # In this test input, we've defined two functions in separate sets
    # with the same ID and different IMTs.
    vuln_content = writetmp(u"""\
<?xml version='1.0' encoding='utf-8'?>
<nrml xmlns="http://openquake.org/xmlns/nrml/0.4"
      xmlns:gml="http://www.opengis.net/gml">
    <vulnerabilityModel>
        <discreteVulnerabilitySet vulnerabilitySetID="PAGER"
                                  assetCategory="population"
                                  lossCategory="fatalities">
            <IML IMT="PGA">0.005 0.007 0.0098 0.0137</IML>
            <discreteVulnerability vulnerabilityFunctionID="A"
                                   probabilisticDistribution="LN">
                <lossRatio>0.01 0.06 0.18 0.36</lossRatio>
                <coefficientsVariation>0.30 0.30 0.30 0.30
                </coefficientsVariation>
            </discreteVulnerability>
        </discreteVulnerabilitySet>
        <discreteVulnerabilitySet vulnerabilitySetID="PAGER"
                                  assetCategory="population"
                                  lossCategory="fatalities">
            <IML IMT="MMI">0.005 0.007 0.0098 0.0137</IML>
            <discreteVulnerability vulnerabilityFunctionID="A"
                                   probabilisticDistribution="LN">
                <lossRatio>0.01 0.06 0.18 0.36</lossRatio>
                <coefficientsVariation>0.30 0.30 0.30 0.30
                </coefficientsVariation>
            </discreteVulnerability>
        </discreteVulnerabilitySet>
    </vulnerabilityModel>
</nrml>
""")
    with self.assertRaises(InvalidFile) as ar:
        nrml.parse(vuln_content)
    # FIX: use str(ar.exception) instead of ar.exception.message —
    # the .message attribute does not exist on Python 3 exceptions
    self.assertIn('Duplicated vulnerabilityFunctionID: A',
                  str(ar.exception))
def test_one_taxonomy_many_imts(self):
    # Reusing the same vulnerabilityFunctionID for several IMTs is
    # forbidden: two sets below share the ID "A" but declare PGA and
    # MMI respectively, so parsing must fail with InvalidFile.
    model_path = writetmp(u"""\
<?xml version='1.0' encoding='utf-8'?>
<nrml xmlns="http://openquake.org/xmlns/nrml/0.4"
      xmlns:gml="http://www.opengis.net/gml">
    <vulnerabilityModel>
        <discreteVulnerabilitySet vulnerabilitySetID="PAGER"
                                  assetCategory="population"
                                  lossCategory="occupants">
            <IML IMT="PGA">0.005 0.007 0.0098 0.0137</IML>
            <discreteVulnerability vulnerabilityFunctionID="A"
                                   probabilisticDistribution="LN">
                <lossRatio>0.01 0.06 0.18 0.36</lossRatio>
                <coefficientsVariation>0.30 0.30 0.30 0.30
                </coefficientsVariation>
            </discreteVulnerability>
        </discreteVulnerabilitySet>
        <discreteVulnerabilitySet vulnerabilitySetID="PAGER"
                                  assetCategory="population"
                                  lossCategory="occupants">
            <IML IMT="MMI">0.005 0.007 0.0098 0.0137</IML>
            <discreteVulnerability vulnerabilityFunctionID="A"
                                   probabilisticDistribution="LN">
                <lossRatio>0.01 0.06 0.18 0.36</lossRatio>
                <coefficientsVariation>0.30 0.30 0.30 0.30
                </coefficientsVariation>
            </discreteVulnerability>
        </discreteVulnerabilitySet>
    </vulnerabilityModel>
</nrml>
""")
    with self.assertRaises(InvalidFile) as ctx:
        nrml.parse(model_path)
    self.assertIn('Duplicated vulnerabilityFunctionID: A',
                  str(ctx.exception))
def test_missing_maxIML(self):
    # A continuous fragility model whose IML node lacks the mandatory
    # maxIML attribute must be rejected with a KeyError naming the
    # attribute and the XML line (the IML node is on line 9 below).
    model_path = writetmp(u"""\
<?xml version='1.0' encoding='utf-8'?>
<nrml xmlns:gml="http://www.opengis.net/gml"
      xmlns="http://openquake.org/xmlns/nrml/0.4">
    <fragilityModel format="continuous">
        <description>Fragility for test</description>
        <limitStates>LS1 LS2</limitStates>
        <ffs type="lognormal">
            <taxonomy>RC</taxonomy>
            <IML IMT="PGA" minIML="9.9" imlUnit="g"/>
            <ffc ls="LS1">
                <params mean="0.2" stddev="0.05" />
            </ffc>
            <ffc ls="LS2">
                <params mean="0.35" stddev="0.10" />
            </ffc>
        </ffs>
    </fragilityModel>
</nrml>""")
    with self.assertRaises(KeyError) as ctx:
        nrml.parse(model_path)
    self.assertIn("node IML: 'maxIML', line 9", str(ctx.exception))
def test_different_levels_ok(self):
    # The same IMT may appear with different IML levels in different
    # vulnerability functions; both functions must be parsed with
    # their own levels intact.
    model_path = writetmp(u"""\
<?xml version='1.0' encoding='utf-8'?>
<nrml xmlns="http://openquake.org/xmlns/nrml/0.4"
      xmlns:gml="http://www.opengis.net/gml">
    <vulnerabilityModel>
        <discreteVulnerabilitySet vulnerabilitySetID="PAGER"
                                  assetCategory="population"
                                  lossCategory="fatalities">
            <IML IMT="PGA">0.005 0.007 0.0098 0.0137</IML>
            <discreteVulnerability vulnerabilityFunctionID="RC/A"
                                   probabilisticDistribution="LN">
                <lossRatio>0.01 0.06 0.18 0.36</lossRatio>
                <coefficientsVariation>0.30 0.30 0.30 0.30
                </coefficientsVariation>
            </discreteVulnerability>
        </discreteVulnerabilitySet>
        <discreteVulnerabilitySet vulnerabilitySetID="PAGER"
                                  assetCategory="population"
                                  lossCategory="fatalities">
            <IML IMT="PGA">0.004 0.008 0.037</IML>
            <discreteVulnerability vulnerabilityFunctionID="RC/B"
                                   probabilisticDistribution="LN">
                <lossRatio>0.01 0.06 0.18</lossRatio>
                <coefficientsVariation>0.30 0.30 0.30
                </coefficientsVariation>
            </discreteVulnerability>
        </discreteVulnerabilitySet>
    </vulnerabilityModel>
</nrml>
""")
    functions = nrml.parse(model_path)
    assert_almost_equal(
        functions['PGA', 'RC/A'].imls,
        numpy.array([0.005, 0.007, 0.0098, 0.0137]))
    assert_almost_equal(
        functions['PGA', 'RC/B'].imls,
        numpy.array([0.004, 0.008, 0.037]))
def test_different_levels_ok(self):
    # The same IMT may appear with different IML levels in different
    # vulnerability functions; both functions must be parsed with
    # their own levels intact.
    model_path = writetmp(u"""\
<?xml version='1.0' encoding='utf-8'?>
<nrml xmlns="http://openquake.org/xmlns/nrml/0.4"
      xmlns:gml="http://www.opengis.net/gml">
    <vulnerabilityModel>
        <discreteVulnerabilitySet vulnerabilitySetID="PAGER"
                                  assetCategory="population"
                                  lossCategory="occupants">
            <IML IMT="PGA">0.005 0.007 0.0098 0.0137</IML>
            <discreteVulnerability vulnerabilityFunctionID="RC/A"
                                   probabilisticDistribution="LN">
                <lossRatio>0.01 0.06 0.18 0.36</lossRatio>
                <coefficientsVariation>0.30 0.30 0.30 0.30
                </coefficientsVariation>
            </discreteVulnerability>
        </discreteVulnerabilitySet>
        <discreteVulnerabilitySet vulnerabilitySetID="PAGER"
                                  assetCategory="population"
                                  lossCategory="occupants">
            <IML IMT="PGA">0.004 0.008 0.037</IML>
            <discreteVulnerability vulnerabilityFunctionID="RC/B"
                                   probabilisticDistribution="LN">
                <lossRatio>0.01 0.06 0.18</lossRatio>
                <coefficientsVariation>0.30 0.30 0.30
                </coefficientsVariation>
            </discreteVulnerability>
        </discreteVulnerabilitySet>
    </vulnerabilityModel>
</nrml>
""")
    functions = nrml.parse(model_path)
    assert_almost_equal(
        functions['PGA', 'RC/A'].imls,
        numpy.array([0.005, 0.007, 0.0098, 0.0137]))
    assert_almost_equal(
        functions['PGA', 'RC/B'].imls,
        numpy.array([0.004, 0.008, 0.037]))
def test_wrong_files(self):
    # Each broken consequence model must fail to parse with the
    # expected exception class and message fragment.
    def check(path, exc_class, fragment):
        with self.assertRaises(exc_class) as ctx:
            nrml.parse(path)
        self.assertIn(fragment, str(ctx.exception))

    # missing lossCategory
    check(self.wrong_csq_model_1, KeyError,
          "node consequenceModel: 'lossCategory', line 3")
    # missing loss state
    check(self.wrong_csq_model_2, ValueError,
          "node consequenceFunction: Expected 4 limit"
          " states, got 3, line 9")
    # inverted loss states
    check(self.wrong_csq_model_3, ValueError,
          "node params: Expected 'ds3', got 'ds4', line 12")
def get_risk_models(oqparam, kind=None):
    """
    Parse the risk-model files referenced by the job configuration.

    :param oqparam: an OqParam instance
    :param kind:
        vulnerability|vulnerability_retrofitted|fragility|consequence;
        if None it is extracted from the oqparam.file_type attribute
    :returns: a dictionary taxonomy -> loss_type -> function
    """
    kind = kind or oqparam.file_type
    rmodels = {}
    for key in oqparam.inputs:
        # match keys like 'structural_vulnerability', 'occupants_fragility'
        mo = re.match("(occupants|%s)_%s$" % (COST_TYPE_REGEX, kind), key)
        if mo:
            key_type = mo.group(1)  # the cost_type in the key
            # can be occupants, structural, nonstructural, ...
            rmodel = nrml.parse(oqparam.inputs[key])
            rmodels[key_type] = rmodel
            if rmodel.lossCategory is None:  # NRML 0.4
                # old format has no lossCategory to validate against
                continue
            cost_type = str(rmodel.lossCategory)
            rmodel_kind = rmodel.__class__.__name__
            kind_ = kind.replace("_retrofitted", "")  # strip retrofitted
            # the parsed model class must match the requested kind
            if not rmodel_kind.lower().startswith(kind_):
                raise ValueError(
                    'Error in the file "%s_file=%s": is '
                    "of kind %s, expected %s"
                    % (key, oqparam.inputs[key], rmodel_kind,
                       kind.capitalize() + "Model")
                )
            # the lossCategory inside the file must match the key prefix
            if cost_type != key_type:
                raise ValueError(
                    'Error in the file "%s_file=%s": lossCategory is of type '
                    '"%s", expected "%s"'
                    % (key, oqparam.inputs[key], rmodel.lossCategory,
                       key_type)
                )
    rdict = collections.defaultdict(dict)
    if kind == "fragility":
        limit_states = []
        for loss_type, fm in sorted(rmodels.items()):
            # build a copy of the FragilityModel with different IM levels
            newfm = fm.build(oqparam.continuous_fragility_discretization,
                             oqparam.steps_per_interval)
            for (imt, taxo), ffl in newfm.items():
                if not limit_states:
                    limit_states.extend(fm.limitStates)
                # we are rejecting the case of loss types with different
                # limit states; this may change in the future
                assert limit_states == fm.limitStates, (limit_states,
                                                        fm.limitStates)
                rdict[taxo][loss_type] = ffl
                # TODO: see if it is possible to remove the attribute
                # below, used in classical_damage
                ffl.steps_per_interval = oqparam.steps_per_interval
        oqparam.limit_states = limit_states
    elif kind == "consequence":
        # consequence models are already keyed as needed
        rdict = rmodels
    else:  # vulnerability
        cl_risk = oqparam.calculation_mode in ("classical",
                                               "classical_risk")
        # only for classical_risk reduce the loss_ratios
        # to make sure they are strictly increasing
        for loss_type, rm in rmodels.items():
            for (imt, taxo), rf in rm.items():
                rdict[taxo][loss_type] = \
                    rf.strictly_increasing() if cl_risk else rf
    return rdict
def get_risk_models(oqparam, kind):
    """
    Parse the risk-model files referenced by the job configuration.

    :param oqparam: an OqParam instance
    :param kind:
        "vulnerability"|"vulnerability_retrofitted"|"fragility"|"consequence"
    :returns: a dictionary imt_taxo -> loss_type -> function
    """
    rmodels = {}
    for key in oqparam.inputs:
        # match keys like 'structural_vulnerability', 'occupants_fragility'
        mo = re.match(
            '(occupants|structural|nonstructural|contents|'
            'business_interruption)_%s$' % kind, key)
        if mo:
            key_type = mo.group(1)  # the cost_type in the key
            # can be occupants, structural, nonstructural, ...
            rmodel = nrml.parse(oqparam.inputs[key])
            rmodels[cost_type_to_loss_type(key_type)] = rmodel
            if rmodel.lossCategory is None:  # NRML 0.4
                # old format has no lossCategory to validate against
                continue
            cost_type = str(rmodel.lossCategory)
            rmodel_kind = rmodel.__class__.__name__
            kind_ = kind.replace('_retrofitted', '')  # strip retrofitted
            # the parsed model class must match the requested kind
            if not rmodel_kind.lower().startswith(kind_):
                raise ValueError(
                    'Error in the .ini file: "%s_file=%s" points to a file '
                    'of kind %s, expected %s' % (
                        key, oqparam.inputs[key], rmodel_kind,
                        kind.capitalize() + 'Model'))
            # the lossCategory inside the file must match the key prefix
            if cost_type != key_type:
                raise ValueError(
                    'Error in the .ini file: "%s_file=%s" is of type "%s", '
                    'expected "%s"' % (key, oqparam.inputs[key],
                                       rmodel.lossCategory, key_type))
    rdict = collections.defaultdict(dict)
    if kind == 'fragility':
        limit_states = []
        for loss_type, fm in sorted(rmodels.items()):
            # build a copy of the FragilityModel with different IM levels
            newfm = fm.build(oqparam.continuous_fragility_discretization,
                             oqparam.steps_per_interval)
            for imt_taxo, ff in newfm.items():
                if not limit_states:
                    limit_states.extend(fm.limitStates)
                # we are rejecting the case of loss types with different
                # limit states; this may change in the future
                assert limit_states == fm.limitStates, (
                    limit_states, fm.limitStates)
                rdict[imt_taxo][loss_type] = ff
                # TODO: see if it is possible to remove the attribute
                # below, used in classical_damage
                ff.steps_per_interval = oqparam.steps_per_interval
        oqparam.limit_states = limit_states
    elif kind == 'consequence':
        # consequence models are already keyed as needed
        rdict = rmodels
    else:  # vulnerability
        for loss_type, rm in rmodels.items():
            for imt_taxo, rf in rm.items():
                rdict[imt_taxo][loss_type] = rf
    return rdict
def test_invalid_srcs_weights_length(self):
    # A sourceGroup declaring two srcs_weights but containing a single
    # source must be rejected with a ValueError.
    path = writetmp('''\
<?xml version="1.0" encoding="utf-8"?>
<nrml xmlns="http://openquake.org/xmlns/nrml/0.5"
      xmlns:gml="http://www.opengis.net/gml">
    <sourceModel name="Classical Hazard QA Test, Case 1 source model">
        <sourceGroup name="group 1"
                     tectonicRegion="active shallow crust"
                     srcs_weights="0.5 0.5">
            <pointSource id="1" name="point source">
                <pointGeometry>
                    <gml:Point>
                        <gml:pos>
                            0.0000000E+00 0.0000000E+00
                        </gml:pos>
                    </gml:Point>
                    <upperSeismoDepth>
                        0.0000000E+00
                    </upperSeismoDepth>
                    <lowerSeismoDepth>
                        1.0000000E+01
                    </lowerSeismoDepth>
                </pointGeometry>
                <magScaleRel>
                    PeerMSR
                </magScaleRel>
                <ruptAspectRatio>
                    1.0000000E+00
                </ruptAspectRatio>
                <incrementalMFD binWidth="1.0000000E+00"
                                minMag="4.0000000E+00">
                    <occurRates>
                        1.0000000E+00
                    </occurRates>
                </incrementalMFD>
                <nodalPlaneDist>
                    <nodalPlane dip="90" probability="1"
                                rake="0" strike="0"/>
                </nodalPlaneDist>
                <hypoDepthDist>
                    <hypoDepth depth="4" probability="1"/>
                </hypoDepthDist>
            </pointSource>
        </sourceGroup>
    </sourceModel>
</nrml>
''')
    converter = SourceConverter(50., 1., 10, 0.1, 10.)
    with self.assertRaises(ValueError) as ctx:
        parse(path, converter)
    expected = 'There are 2 srcs_weights but 1 source(s)'
    self.assertEqual(expected, str(ctx.exception))