Example #1
    def test_ill_formed_rupture(self):
        rup_file = StringIO('''\
<?xml version='1.0' encoding='utf-8'?>
<nrml xmlns:gml="http://www.opengis.net/gml"
      xmlns="http://openquake.org/xmlns/nrml/0.4">
    <simpleFaultRupture>
        <magnitude>7.65</magnitude>
        <rake>15.0</rake>
        <hypocenter lon="0.0" lat="0.0" depth="-5.0"/>
        <simpleFaultGeometry>
                <gml:LineString>
                    <gml:posList>
                        -124.704 40.363
                        -124.977 41.214
                        -125.140 42.096
                    </gml:posList>
                </gml:LineString>
            <dip>50.0</dip>
            <upperSeismoDepth>12.5</upperSeismoDepth>
            <lowerSeismoDepth>19.5</lowerSeismoDepth>
        </simpleFaultGeometry>
    </simpleFaultRupture>
</nrml>
''')

        # at line 7 there is an invalid depth="-5.0"
        with self.assertRaises(ValueError) as ctx:
            read_nodes(rup_file, filter_ruptures, s.ValidNode).next()
        self.assertIn('line 7', str(ctx.exception))
Example #2
    def test_raises_useful_error_1(self):
        area_file = StringIO("""\
<?xml version='1.0' encoding='utf-8'?>
<nrml xmlns:gml="http://www.opengis.net/gml"
      xmlns="http://openquake.org/xmlns/nrml/0.4">
    <sourceModel name="Some Source Model">
        <areaSource id="1" name="Quito" tectonicRegion="Active Shallow Crust">
            <areaGeometry>
                <gml:Polygon>
                    <gml:exterior>
                        <gml:LinearRing>
                            <gml:posList>
                             -122.5 37.5
                             -121.5 37.5
                             -121.5 38.5
                             -122.5 38.5
                            </gml:posList>
                        </gml:LinearRing>
                    </gml:exterior>
                </gml:Polygon>
                <upperSeismoDepth>0.0</upperSeismoDepth>
                <lowerSeismoDepth>10.0</lowerSeismoDepth>
            </areaGeometry>
            <magScaleRel>PeerMSR</magScaleRel>
            <ruptAspectRatio>1.5</ruptAspectRatio>
            <incrementalMFD minMag="6.55" binWidth="0.1">
                <occurRates>-0.0010614989 8.8291627E-4 7.3437777E-4
                            6.108288E-4 5.080653E-4
                </occurRates>
            </incrementalMFD>
            <nodalPlaneDist>
                <nodalPlane probability="0.3" strike="0.0" dip="90.0" rake="0.0" />
                <nodalPlane probability="0.7" strike="90.0" dip="45.0" rake="90.0" />
            </nodalPlaneDist>
            <hypoDepthDist>
                <hypoDepth probability="0.5" depth="4.0" />
                <hypoDepth probability="0.5" depth="8.0" />
            </hypoDepthDist>
        </areaSource>

    </sourceModel>
</nrml>
""")
        msg = ('Could not convert occurRates->positivefloats: '
               'float -0.0010614989 < 0, line 25')
        with self.assertRaises(ValueError) as ctx:
            read_nodes(area_file, filter_sources, s.ValidNode).next()
        self.assertIn(msg, str(ctx.exception))
Example #3
def parse_source_model(fname, converter, apply_uncertainties=lambda src: None):
    """
    Parse a NRML source model and return an ordered list of SourceCollector
    instances.

    :param str fname:
        the full pathname of the source model file
    :param converter:
        :class:`openquake.commonlib.source.SourceConverter` instance
    :param apply_uncertainties:
        a function modifying the sources (or doing nothing)
    """
    converter.fname = fname
    source_stats_dict = {}
    source_ids = set()
    src_nodes = read_nodes(fname, lambda elem: 'Source' in elem.tag, ValidNode)
    for src_node in src_nodes:
        src = converter.convert_node(src_node)
        if src.source_id in source_ids:
            raise DuplicateID(
                'The source ID %s is duplicated!' % src.source_id)
        apply_uncertainties(src)
        trt = src.tectonic_region_type
        if trt not in source_stats_dict:
            source_stats_dict[trt] = SourceCollector(trt)
        source_stats_dict[trt].update(src)
        source_ids.add(src.source_id)

    # return ordered SourceCollectors
    return sorted(source_stats_dict.itervalues())
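
A minimal usage sketch for parse_source_model (not part of the original code base): the file name source_model.xml is hypothetical, and the converter parameters mirror the SourceConverter values used elsewhere in these examples.

# Usage sketch, assuming SourceConverter and parse_source_model are
# importable from the surrounding module; 'source_model.xml' is a
# hypothetical file name.
converter = SourceConverter(
    investigation_time=50.,
    rupture_mesh_spacing=1,  # km
    width_of_mfd_bin=1.,  # for Truncated GR MFDs
    area_source_discretization=1.)  # km
source_collectors = parse_source_model('source_model.xml', converter)
for collector in source_collectors:  # one collector per tectonic region type
    print collector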
Example #4
    def test_nonparametric_source_ok(self):
        converter = s.SourceConverter(
            investigation_time=50.,
            rupture_mesh_spacing=1,  # km
            width_of_mfd_bin=1.,  # for Truncated GR MFDs
            area_source_discretization=1.)
        np, = read_nodes(NONPARAMETRIC_SOURCE, filter_sources, s.ValidNode)
        converter.convert_node(np)
Example #5
    def initialize_sources(self):
        """
        Get the rupture_model file from the job.ini file, and set the
        attribute self.rupture.
        """
        rup_spacing = self.job.get_param('rupture_mesh_spacing')
        rup_model = self.job.get_param('inputs')['rupture_model']
        rup_node, = read_nodes(rup_model, lambda el: 'Rupture' in el.tag,
                               ValidNode)
        self.rupture = RuptureConverter(rup_spacing).convert_node(rup_node)
Example #6
    def setUpClass(cls):
        converter = s.SourceConverter(
            investigation_time=50.,
            rupture_mesh_spacing=1,  # km
            width_of_mfd_bin=1.,  # for Truncated GR MFDs
            area_source_discretization=1.,  # km
        )
        source_nodes = read_nodes(MIXED_SRC_MODEL, filter_sources, s.ValidNode)
        (cls.area, cls.point, cls.simple, cls.cmplx, cls.char_simple,
         cls.char_complex, cls.char_multi) = map(
            converter.convert_node, source_nodes)

        # the parameters here would typically be specified in the job .ini
        cls.investigation_time = 50.
        cls.rupture_mesh_spacing = 1  # km
        cls.width_of_mfd_bin = 1.  # for Truncated GR MFDs
        cls.area_source_discretization = 1.  # km
        cls.converter = converter
Example #7
def get_fragility_functions(fname):
    """
    :param fname:
        path of the fragility file
    :returns:
        damage_states list and dictionary taxonomy -> functions
    """
    [fmodel] = read_nodes(
        fname, lambda el: el.tag.endswith('fragilityModel'), FragilityNode)
    # ~fmodel.description is ignored
    limit_states = ~fmodel.limitStates
    tag = 'ffc' if fmodel['format'] == 'continuous' else 'ffd'
    fragility_functions = {}  # taxonomy -> functions
    for ffs in fmodel.getnodes('ffs'):
        nodamage = ffs.attrib.get('noDamageLimit')
        taxonomy = ~ffs.taxonomy
        imt_str, imls, min_iml, max_iml = ~ffs.IML
        fragility_functions[taxonomy] = List([], imt=imt_str, imls=imls)
        lstates = []
        for ff in ffs.getnodes(tag):
            lstates.append(ff['ls'])
            if tag == 'ffc':
                with context(fname, ff):
                    mean_stddev = ~ff.params
                fragility_functions[taxonomy].append(
                    scientific.FragilityFunctionContinuous(*mean_stddev))
            else:  # discrete
                with context(fname, ff):
                    poes = ~ff.poEs
                if nodamage is None:
                    fragility_functions[taxonomy].append(
                        scientific.FragilityFunctionDiscrete(
                            imls, poes, imls[0]))
                else:
                    fragility_functions[taxonomy].append(
                        scientific.FragilityFunctionDiscrete(
                            [nodamage] + imls, [0.0] + poes, nodamage))
        if lstates != limit_states:
            raise InvalidFile("Expected limit states %s, got %s in %s" %
                             (limit_states, lstates, fname))

    return ['no_damage'] + limit_states, fragility_functions
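
A short usage sketch for get_fragility_functions; the file name fragility_model.xml is hypothetical and only illustrates the shape of the return value.

# Usage sketch: 'fragility_model.xml' is a hypothetical path; the first
# returned value is the damage-state list ('no_damage' plus the limit
# states), the second maps each taxonomy to its fragility functions.
damage_states, functions_by_taxonomy = get_fragility_functions(
    'fragility_model.xml')
for taxonomy in sorted(functions_by_taxonomy):
    print taxonomy, len(functions_by_taxonomy[taxonomy])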
Example #8
def get_vulnerability_functions(fname):
    """
    :param fname:
        path of the vulnerability file
    :returns:
        a dictionary imt, taxonomy -> vulnerability function
    """
    imts = set()
    taxonomies = set()
    vf_dict = {}  # imt, taxonomy -> vulnerability function
    for vset in read_nodes(fname, filter_vset, VulnerabilityNode):
        imt_str, imls, min_iml, max_iml = ~vset.IML
        if imt_str in imts:
            raise InvalidFile('Duplicated IMT %s: %s, line %d' %
                              (imt_str, fname, vset.imt.lineno))
        imts.add(imt_str)
        for vfun in vset.getnodes('discreteVulnerability'):
            taxonomy = vfun['vulnerabilityFunctionID']
            if taxonomy in taxonomies:
                raise InvalidFile(
                    'Duplicated vulnerabilityFunctionID: %s: %s, line %d' %
                    (taxonomy, fname, vfun.lineno))
            taxonomies.add(taxonomy)
            with context(fname, vfun):
                loss_ratios = ~vfun.lossRatio
                coefficients = ~vfun.coefficientsVariation
            if len(loss_ratios) != len(imls):
                raise InvalidFile(
                    'There are %d loss ratios, but %d imls: %s, line %d' %
                    (len(loss_ratios), len(imls), fname,
                     vfun.lossRatio.lineno))
            if len(coefficients) != len(imls):
                raise InvalidFile(
                    'There are %d coefficients, but %d imls: %s, line %d' %
                    (len(coefficients), len(imls), fname,
                     vfun.coefficientsVariation.lineno))
            with context(fname, vfun):
                vf_dict[imt_str, taxonomy] = scientific.VulnerabilityFunction(
                    imt_str, imls, loss_ratios, coefficients,
                    vfun['probabilisticDistribution'])
    return vf_dict
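
A short usage sketch for get_vulnerability_functions; the file name vulnerability_model.xml is hypothetical.

# Usage sketch: 'vulnerability_model.xml' is a hypothetical path; the
# returned dictionary is keyed by (imt, taxonomy) pairs, as built above.
vf_dict = get_vulnerability_functions('vulnerability_model.xml')
for (imt, taxonomy) in sorted(vf_dict):
    print imt, taxonomy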
Example #9
    def test_well_formed_ruptures(self):
        converter = s.RuptureConverter(rupture_mesh_spacing=1.5)
        for fname in (SIMPLE_FAULT_RUPTURE, COMPLEX_FAULT_RUPTURE,
                      SINGLE_PLANE_RUPTURE, MULTI_PLANES_RUPTURE):
            node, = read_nodes(fname, filter_ruptures, s.ValidNode)
            converter.convert_node(node)