Example #1
    def test_ill_formed_rupture(self):
        rup_file = StringIO('''\
<?xml version='1.0' encoding='utf-8'?>
<nrml xmlns:gml="http://www.opengis.net/gml"
      xmlns="http://openquake.org/xmlns/nrml/0.4">
    <simpleFaultRupture>
        <magnitude>7.65</magnitude>
        <rake>15.0</rake>
        <hypocenter lon="0.0" lat="0.0" depth="-5.0"/>
        <simpleFaultGeometry>
                <gml:LineString>
                    <gml:posList>
                        -124.704 40.363
                        -124.977 41.214
                        -125.140 42.096
                    </gml:posList>
                </gml:LineString>
            <dip>50.0</dip>
            <upperSeismoDepth>12.5</upperSeismoDepth>
            <lowerSeismoDepth>19.5</lowerSeismoDepth>
        </simpleFaultGeometry>
    </simpleFaultRupture>
</nrml>
''')

        # at line 7 there is an invalid depth="-5.0"
        with self.assertRaises(ValueError) as ctx:
            next(read_nodes(rup_file, filter_ruptures, ValidNode))
        self.assertIn('line 7', str(ctx.exception))
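
The same validation can be exercised outside a test case; a minimal sketch, assuming read_nodes, filter_ruptures and ValidNode are imported as in the test module above:

try:
    # read_nodes yields validated nodes lazily; pulling the first node
    # triggers the validation of the negative hypocenter depth
    next(read_nodes(rup_file, filter_ruptures, ValidNode))
except ValueError as exc:
    print('invalid rupture:', exc)  # the message includes the line number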
Example #2
    def test_raises_useful_error_1(self):
        area_file = StringIO("""\
<?xml version='1.0' encoding='utf-8'?>
<nrml xmlns:gml="http://www.opengis.net/gml"
      xmlns="http://openquake.org/xmlns/nrml/0.4">
    <sourceModel name="Some Source Model">
        <areaSource id="1" name="Quito" tectonicRegion="Active Shallow Crust">
            <areaGeometry>
                <gml:Polygon>
                    <gml:exterior>
                        <gml:LinearRing>
                            <gml:posList>
                             -122.5 37.5
                             -121.5 37.5
                             -121.5 38.5
                             -122.5 38.5
                            </gml:posList>
                        </gml:LinearRing>
                    </gml:exterior>
                </gml:Polygon>
                <upperSeismoDepth>0.0</upperSeismoDepth>
                <lowerSeismoDepth>10.0</lowerSeismoDepth>
            </areaGeometry>
            <magScaleRel>PeerMSR</magScaleRel>
            <ruptAspectRatio>1.5</ruptAspectRatio>
            <incrementalMFD minMag="6.55" binWidth="0.1">
                <occurRates>-0.0010614989 8.8291627E-4 7.3437777E-4
                            6.108288E-4 5.080653E-4
                </occurRates>
            </incrementalMFD>
            <nodalPlaneDist>
         <nodalPlane probability="0.3" strike="0.0" dip="90.0" rake="0.0" />
         <nodalPlane probability="0.7" strike="90.0" dip="45.0" rake="90.0" />
            </nodalPlaneDist>
            <hypoDepthDist>
                <hypoDepth probability="0.5" depth="4.0" />
                <hypoDepth probability="0.5" depth="8.0" />
            </hypoDepthDist>
        </areaSource>

    </sourceModel>
</nrml>
""")
        msg = ('Could not convert occurRates->positivefloats: '
               'float -0.0010614989 < 0, line 25')
        with self.assertRaises(ValueError) as ctx:
            next(read_nodes(area_file, filter_sources, ValidNode))
        self.assertIn(msg, str(ctx.exception))
Example #3
def parse_source_model(fname, converter, apply_uncertainties=lambda src: None):
    """
    Parse a NRML source model and return an ordered list of TrtModel
    instances.

    :param str fname:
        the full pathname of the source model file
    :param converter:
        :class:`openquake.commonlib.source.SourceConverter` instance
    :param apply_uncertainties:
        a function modifying the sources (or doing nothing)
    """
    converter.fname = fname
    source_stats_dict = {}
    source_ids = set()
    src_nodes = read_nodes(fname, lambda elem: 'Source' in elem.tag,
                           nodefactory['sourceModel'])
    for no, src_node in enumerate(src_nodes, 1):
        src = converter.convert_node(src_node)
        if src.source_id in source_ids:
            raise DuplicatedID('The source ID %s is duplicated!' %
                               src.source_id)
        apply_uncertainties(src)
        trt = src.tectonic_region_type
        if trt not in source_stats_dict:
            source_stats_dict[trt] = TrtModel(trt)
        source_stats_dict[trt].update(src)
        source_ids.add(src.source_id)
        if no % 10000 == 0:  # log every 10,000 sources parsed
            logging.info('Parsed %d sources from %s', no, fname)

    # return ordered TrtModels
    return sorted(source_stats_dict.values())
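
A hypothetical invocation; the converter parameters mirror those used in the test examples elsewhere on this page, and 'source_model.xml' is a placeholder path:

converter = SourceConverter(
    investigation_time=50.,        # years
    rupture_mesh_spacing=1,        # km
    complex_fault_mesh_spacing=1,  # km
    width_of_mfd_bin=1.,           # for Truncated GR MFDs
    area_source_discretization=1.)
for trt_model in parse_source_model('source_model.xml', converter):
    print(trt_model.trt)  # the TrtModels come back sorted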
Example #4
def get_vulnerability_functions(fname):
    """
    :param fname:
        path of the vulnerability file
    :returns:
        a dictionary imt, taxonomy -> vulnerability function
    """
    # NB: the vulnerabilitySetID is not a unique ID!
    # it is right to have several vulnerability sets with the same ID
    # the IMTs can also be duplicated and with different levels, each
    # vulnerability function in a set will get its own levels
    imts = set()
    taxonomies = set()
    vf_dict = {}  # imt, taxonomy -> vulnerability function
    node = nrml.read(fname)
    if node['xmlns'] == 'http://openquake.org/xmlns/nrml/0.5':
        vmodel = node[0]
        for vfun in vmodel[1:]:  # the first node is the description
            imt = vfun.imls['imt']
            imls = numpy.array(~vfun.imls)
            taxonomy = vfun['id']
            loss_ratios, probs = [], []
            for probabilities in vfun[1:]:
                loss_ratios.append(probabilities['lr'])
                probs.append(valid.probabilities(~probabilities))
            probs = numpy.array(probs)
            assert probs.shape == (len(loss_ratios), len(imls))
            vf_dict[imt, taxonomy] = scientific.VulnerabilityFunctionWithPMF(
                taxonomy, imt, imls, numpy.array(loss_ratios), probs)
        return vf_dict
    # otherwise, read the old format (NRML 0.4)
    for vset in read_nodes(fname, filter_vset,
                           nodefactory['vulnerabilityModel']):
        imt_str, imls, min_iml, max_iml, imlUnit = ~vset.IML
        imts.add(imt_str)
        for vfun in vset.getnodes('discreteVulnerability'):
            taxonomy = vfun['vulnerabilityFunctionID']
            if taxonomy in taxonomies:
                raise InvalidFile(
                    'Duplicated vulnerabilityFunctionID: %s: %s, line %d' %
                    (taxonomy, fname, vfun.lineno))
            taxonomies.add(taxonomy)
            with context(fname, vfun):
                loss_ratios = ~vfun.lossRatio
                coefficients = ~vfun.coefficientsVariation
            if len(loss_ratios) != len(imls):
                raise InvalidFile(
                    'There are %d loss ratios, but %d imls: %s, line %d' %
                    (len(loss_ratios), len(imls), fname,
                     vfun.lossRatio.lineno))
            if len(coefficients) != len(imls):
                raise InvalidFile(
                    'There are %d coefficients, but %d imls: %s, line %d' %
                    (len(coefficients), len(imls), fname,
                     vfun.coefficientsVariation.lineno))
            with context(fname, vfun):
                vf_dict[imt_str, taxonomy] = scientific.VulnerabilityFunction(
                    taxonomy, imt_str, imls, loss_ratios, coefficients,
                    vfun['probabilisticDistribution'])
    return vf_dict
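
A sketch of consuming the returned dictionary; the path is a placeholder and the printable representation of the functions depends on the engine version:

vf_dict = get_vulnerability_functions('vulnerability_model.xml')
for (imt, taxonomy), vfun in vf_dict.items():
    # one vulnerability function per (intensity measure type, taxonomy) pair
    print(imt, taxonomy, vfun)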
Example #5
    def parse_sources(self, fname):
        """
        Parse all the sources and return them ordered by tectonic region type.
        It does not count the ruptures, so it is relatively fast.

        :param fname:
            the full pathname of the source model file
        """
        sources = []
        source_ids = set()
        self.converter.fname = fname
        src_nodes = read_nodes(fname, lambda elem: 'Source' in elem.tag,
                               nodefactory['sourceModel'])
        for no, src_node in enumerate(src_nodes, 1):
            src = self.converter.convert_node(src_node)
            if src.source_id in source_ids:
                raise DuplicatedID('The source ID %s is duplicated!' %
                                   src.source_id)
            sources.append(src)
            source_ids.add(src.source_id)
            if no % 10000 == 0:  # log every 10,000 sources parsed
                logging.info('Parsed %d sources from %s', no, fname)
        if no % 10000 != 0:
            logging.info('Parsed %d sources from %s', no, fname)
        return sorted(sources, key=operator.attrgetter('tectonic_region_type'))
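
parse_sources is a method; a hypothetical call through the SourceModelParser wrapper that appears in later examples (the constructor signature here is an assumption):

parser = SourceModelParser(converter)  # converter: a SourceConverter
sources = parser.parse_sources('source_model.xml')  # placeholder path
# the sources come back sorted by tectonic_region_type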
Example #6
    def test_well_formed_ruptures(self):
        converter = s.RuptureConverter(rupture_mesh_spacing=1.5,
                                       complex_fault_mesh_spacing=1.5)
        for fname in (SIMPLE_FAULT_RUPTURE, COMPLEX_FAULT_RUPTURE,
                      SINGLE_PLANE_RUPTURE, MULTI_PLANES_RUPTURE):
            node, = read_nodes(fname, filter_ruptures, ValidNode)
            converter.convert_node(node)
Example #7
def parse_source_model(fname, converter, apply_uncertainties=lambda src: None):
    """
    Parse a NRML source model and return an ordered list of TrtModel
    instances.

    :param str fname:
        the full pathname of the source model file
    :param converter:
        :class:`openquake.commonlib.source.SourceConverter` instance
    :param apply_uncertainties:
        a function modifying the sources (or doing nothing)
    """
    converter.fname = fname
    source_stats_dict = {}
    source_ids = set()
    src_nodes = read_nodes(fname, lambda elem: 'Source' in elem.tag,
                           nodefactory['sourceModel'])
    for no, src_node in enumerate(src_nodes, 1):
        src = converter.convert_node(src_node)
        if src.source_id in source_ids:
            raise DuplicatedID(
                'The source ID %s is duplicated!' % src.source_id)
        apply_uncertainties(src)
        trt = src.tectonic_region_type
        if trt not in source_stats_dict:
            source_stats_dict[trt] = TrtModel(trt)
        source_stats_dict[trt].update(src)
        source_ids.add(src.source_id)
        if no % 10000 == 0:  # log every 10,000 sources parsed
            logging.info('Parsed %d sources from %s', no, fname)

    # return ordered TrtModels
    return sorted(source_stats_dict.values())
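Example #8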
def xml_to_csv(input_xml, output_csv):
    """
    Parses the site model from an input xml file to a headed csv file
    """
    # Read in from XML
    sites = read_nodes(input_xml, lambda el: el.tag.endswith("site"),
                       nrml.nodefactory["siteModel"])
    fid = open(output_csv, "w")
    print("longitude,latitude,vs30,vs30Type,z1pt0,z2pt5,backarc", file=fid)
    for site in sites:
        if "backarc" in site.attrib:
            if ast.literal_eval(site.attrib["backarc"]):
                site.attrib["backarc"] = 1
            else:
                site.attrib["backarc"] = 0

        else:
            site.attrib["backarc"] = 0

        if site["vs30Type"] == "measured":
            vs30_type = 1
        else:
            vs30_type = 0

        print(",".join([
            site["lon"], site["lat"], site["vs30"],
            str(vs30_type), site["z1pt0"], site["z2pt5"],
            str(site["backarc"])]), file=fid)
    fid.close()
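
Invocation is a single call; both paths are placeholders:

xml_to_csv('site_model.xml', 'site_model.csv')
# writes one row per <site> node under the header
# longitude,latitude,vs30,vs30Type,z1pt0,z2pt5,backarc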
Example #9
    def test_alternative_mfds(self):
        converter = s.SourceConverter(
            investigation_time=1.,
            rupture_mesh_spacing=1,  # km
            complex_fault_mesh_spacing=5,  # km
            width_of_mfd_bin=0.1,  # for Truncated GR MFDs
            area_source_discretization=1.)
        source_nodes = read_nodes(ALT_MFDS_SRC_MODEL,
                                  filter_sources,
                                  ValidNode)
        [cplx1, sflt1, sflt2] = map(converter.convert_node, source_nodes)
        # Check the values
        # Arbitrary MFD
        assert_close(cplx1.mfd.magnitudes, [8.6, 8.8, 9.0])
        assert_close(cplx1.mfd.occurrence_rates, [0.0006, 0.0008, 0.0004])
        # Youngs & Coppersmith from characteristic rate
        self.assertAlmostEqual(sflt1.mfd.b_val, 1.0)
        self.assertAlmostEqual(sflt1.mfd.a_val, 3.3877843113)
        self.assertAlmostEqual(sflt1.mfd.char_mag, 7.0)
        self.assertAlmostEqual(sflt1.mfd.char_rate, 0.005)
        self.assertAlmostEqual(sflt1.mfd.min_mag, 5.0)
        # Youngs & Coppersmith from total moment rate
        self.assertAlmostEqual(sflt2.mfd.b_val, 1.0)
        self.assertAlmostEqual(sflt2.mfd.a_val, 5.0800, 3)
        self.assertAlmostEqual(sflt2.mfd.char_mag, 7.0)
        self.assertAlmostEqual(sflt2.mfd.char_rate, 0.24615, 5)
        self.assertAlmostEqual(sflt2.mfd.min_mag, 5.0)
Example #10
    def test_well_formed_ruptures(self):
        converter = s.RuptureConverter(rupture_mesh_spacing=1.5,
                                       complex_fault_mesh_spacing=1.5)
        for fname in (SIMPLE_FAULT_RUPTURE, COMPLEX_FAULT_RUPTURE,
                      SINGLE_PLANE_RUPTURE, MULTI_PLANES_RUPTURE):
            node, = read_nodes(fname, filter_ruptures, ValidNode)
            converter.convert_node(node)
Example #11
def read_data(fileobj):
    """
    Read ground motion fields from an XML file.

    :param fileobj: the XML file containing the GMFs
    :returns: (imts, rupture_tags, rows)
    """
    tags = collections.defaultdict(set)
    rows = []
    for gmf in read_nodes(
            fileobj, lambda n: n.tag.endswith('gmf'), GmfNode):
        tag = gmf['ruptureId']
        imt = gmf['IMT']
        if imt == 'SA':
            imt = 'SA(%s)' % gmf['saPeriod']
        data = []
        for node in gmf:
            data.append(('POINT(%(lon)s %(lat)s)' % node, node['gmv']))
        if tag in tags[imt]:
            raise DuplicatedTag(tag)
        tags[imt].add(tag)
        rows.append((imt, tag, data))
    # check consistency of the tags
    expected_tags = tags[imt]
    for tagvalues in tags.values():
        assert tagvalues == expected_tags, (expected_tags, tagvalues)
    return set(tags), sorted(expected_tags), rows
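
A sketch of consuming the returned triple; 'gmf.xml' is a placeholder for a NRML file of ground motion fields:

with open('gmf.xml') as fileobj:
    imts, rupture_tags, rows = read_data(fileobj)
for imt, tag, data in rows:
    # data is a list of (WKT point, ground motion value) pairs
    print(imt, tag, len(data))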
Example #12
    def parse_sources(self, fname):
        """
        Parse all the sources and return them ordered by tectonic region type.
        It does not count the ruptures, so it is relatively fast.

        :param fname:
            the full pathname of the source model file
        """
        sources = []
        source_ids = set()
        self.converter.fname = fname
        src_nodes = read_nodes(fname, lambda elem: 'Source' in elem.tag,
                               nodefactory['sourceModel'])
        for no, src_node in enumerate(src_nodes, 1):
            src = self.converter.convert_node(src_node)
            if src.source_id in source_ids:
                raise DuplicatedID(
                    'The source ID %s is duplicated!' % src.source_id)
            sources.append(src)
            source_ids.add(src.source_id)
            if no % 10000 == 0:  # log every 10,000 sources parsed
                logging.info('Parsed %d sources from %s', no, fname)
        if no % 10000 != 0:
            logging.info('Parsed %d sources from %s', no, fname)
        return sorted(sources, key=operator.attrgetter('tectonic_region_type'))
Example #13
    def test_ill_formed_rupture(self):
        rup_file = BytesIO(b'''\
<?xml version='1.0' encoding='utf-8'?>
<nrml xmlns:gml="http://www.opengis.net/gml"
      xmlns="http://openquake.org/xmlns/nrml/0.4">
    <simpleFaultRupture>
        <magnitude>7.65</magnitude>
        <rake>15.0</rake>
        <hypocenter lon="0.0" lat="0.0" depth="-5.0"/>
        <simpleFaultGeometry>
                <gml:LineString>
                    <gml:posList>
                        -124.704 40.363
                        -124.977 41.214
                        -125.140 42.096
                    </gml:posList>
                </gml:LineString>
            <dip>50.0</dip>
            <upperSeismoDepth>12.5</upperSeismoDepth>
            <lowerSeismoDepth>19.5</lowerSeismoDepth>
        </simpleFaultGeometry>
    </simpleFaultRupture>
</nrml>
''')

        # at line 7 there is an invalid depth="-5.0"
        with self.assertRaises(ValueError) as ctx:
            next(read_nodes(rup_file, filter_ruptures, ValidNode))
        self.assertIn('line 7', str(ctx.exception))
Example #14
def read_data(fileobj):
    """
    Read ground motion fields from an XML file.

    :param fileobj: the XML file containing the GMFs
    :returns: (imts, rupture_tags, rows)
    """
    tags = collections.defaultdict(set)
    rows = []
    for gmf in read_nodes(
            fileobj, lambda n: n.tag.endswith('gmf'), GmfNode):
        tag = gmf['ruptureId']
        imt = gmf['IMT']
        if imt == 'SA':
            imt = 'SA(%s)' % gmf['saPeriod']
        data = []
        for node in gmf:
            data.append(('POINT(%(lon)s %(lat)s)' % node, node['gmv']))
        if tag in tags[imt]:
            raise DuplicatedTag(tag)
        tags[imt].add(tag)
        rows.append((imt, tag, data))
    # check consistency of the tags
    expected_tags = tags[imt]
    for tagvalues in tags.values():
        assert tagvalues == expected_tags, (expected_tags, tagvalues)
    return set(tags), sorted(expected_tags), rows
Example #15
    def test_nonparametric_source_ok(self):
        converter = s.SourceConverter(
            investigation_time=50.,
            rupture_mesh_spacing=1,  # km
            complex_fault_mesh_spacing=1,  # km
            width_of_mfd_bin=1.,  # for Truncated GR MFDs
            area_source_discretization=1.)
        np, = read_nodes(NONPARAMETRIC_SOURCE, filter_sources, ValidNode)
        converter.convert_node(np)
Example #16
    def test_nonparametric_source_ok(self):
        converter = s.SourceConverter(
            investigation_time=50.,
            rupture_mesh_spacing=1,  # km
            complex_fault_mesh_spacing=1,  # km
            width_of_mfd_bin=1.,  # for Truncated GR MFDs
            area_source_discretization=1.)
        np, = read_nodes(NONPARAMETRIC_SOURCE, filter_sources, ValidNode)
        converter.convert_node(np)
Example #17
def get_rupture(oqparam):
    """
    Returns a hazardlib rupture by reading the `rupture_model` file.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    """
    rup_model = oqparam.inputs["rupture_model"]
    rup_node, = read_nodes(rup_model, lambda el: "Rupture" in el.tag,
                           source.nodefactory["sourceModel"])
    conv = sourceconverter.RuptureConverter(oqparam.rupture_mesh_spacing,
                                            oqparam.complex_fault_mesh_spacing)
    return conv.convert_node(rup_node)
Example #18
def build_rupture_from_file(rupture_file, simple_mesh_spacing=1.0,
        complex_mesh_spacing=1.0):
    """
    Parses a rupture in OpenQuake NRML 0.4 format and converts it to
    an instance of :class:`openquake.hazardlib.source.rupture.Rupture`
    """
    rup_node, = read_nodes(rupture_file, lambda el: 'Rupture' in el.tag,
                           nodefactory['sourceModel'])
    conv = RuptureConverter(simple_mesh_spacing,
                            complex_mesh_spacing)
    return conv.convert_node(rup_node)
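
A hypothetical call; the rupture file path is a placeholder and the spacings are in km:

rupture = build_rupture_from_file('simple_fault_rupture.xml',
                                  simple_mesh_spacing=1.0,
                                  complex_mesh_spacing=1.0)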
Example #19
def get_site_model(oqparam):
    """
    Convert the NRML file into an iterator over 6-tuples of the form
    (z1pt0, z2pt5, measured, vs30, lon, lat)

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    """
    for node in read_nodes(oqparam.inputs['site_model'],
                           lambda el: el.tag.endswith('site'),
                           source.nodefactory['siteModel']):
        yield valid.site_param(**node.attrib)
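
The generator is typically consumed eagerly; a sketch assuming a valid OqParam instance whose inputs dictionary contains a 'site_model' entry:

site_params = list(get_site_model(oqparam))
for param in site_params:
    print(param)  # one validated record per <site> node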
Example #20
def get_site_model(oqparam):
    """
    Convert the NRML file into an iterator over 6-tuples of the form
    (z1pt0, z2pt5, measured, vs30, lon, lat)

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    """
    for node in read_nodes(oqparam.inputs['site_model'],
                           lambda el: el.tag.endswith('site'),
                           source.nodefactory['siteModel']):
        yield valid.site_param(**node.attrib)
Example #21
def get_rupture(oqparam):
    """
    Returns a hazardlib rupture by reading the `rupture_model` file.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    """
    rup_model = oqparam.inputs['rupture_model']
    rup_node, = read_nodes(rup_model, lambda el: 'Rupture' in el.tag,
                           source.nodefactory['sourceModel'])
    conv = sourceconverter.RuptureConverter(oqparam.rupture_mesh_spacing,
                                            oqparam.complex_fault_mesh_spacing)
    return conv.convert_node(rup_node)
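
Usage parallels get_site_model; oqparam is assumed to carry a 'rupture_model' input along with the mesh-spacing parameters:

rupture = get_rupture(oqparam)  # a hazardlib rupture instance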
Example #22
    def test_raises_useful_error_2(self):
        area_file = BytesIO(b"""\
<?xml version='1.0' encoding='utf-8'?>
<nrml xmlns:gml="http://www.opengis.net/gml"
      xmlns="http://openquake.org/xmlns/nrml/0.4">
    <sourceModel name="Some Source Model">
        <areaSource id="1" name="Quito" tectonicRegion="Active Shallow Crust">
            <areaGeometry>
                <gml:Polygon>
                    <gml:exterior>
                        <gml:LinearRing>
                            <gml:posList>
                             -122.5 37.5
                             -121.5 37.5
                             -121.5 38.5
                             -122.5 38.5
                            </gml:posList>
                        </gml:LinearRing>
                    </gml:exterior>
                </gml:Polygon>
                <upperSeismoDepth>0.0</upperSeismoDepth>
                <lowerSeismoDepth>10.0</lowerSeismoDepth>
            </areaGeometry>
            <magScaleRel>PeerMSR</magScaleRel>
            <ruptAspectRatio>1.5</ruptAspectRatio>
            <incrementalMFD minMag="6.55" binWidth="0.1">
                <occurRates>0.0010614989 8.8291627E-4 7.3437777E-4
                            6.108288E-4 5.080653E-4
                </occurRates>
            </incrementalMFD>
            <nodalPlanedist>
         <nodalPlane probability="0.3" strike="0.0" dip="90.0" rake="0.0" />
         <nodalPlane probability="0.7" strike="90.0" dip="45.0" rake="90.0" />
            </nodalPlanedist>
            <hypoDepthDist>
                <hypoDepth probability="0.5" depth="4.0" />
                <hypoDepth probability="0.5" depth="8.0" />
            </hypoDepthDist>
        </areaSource>

    </sourceModel>
</nrml>
""")
        [area] = read_nodes(area_file, filter_sources, ValidNode)
        with self.assertRaises(NameError) as ctx:
            self.parser.converter.convert_node(area)
        self.assertIn(
            "node areaSource: No subnode named 'nodalPlaneDist'"
            " found in 'areaSource', line 5 of", str(ctx.exception))
Example #23
    def test_raises_useful_error_2(self):
        area_file = BytesIO(b"""\
<?xml version='1.0' encoding='utf-8'?>
<nrml xmlns:gml="http://www.opengis.net/gml"
      xmlns="http://openquake.org/xmlns/nrml/0.4">
    <sourceModel name="Some Source Model">
        <areaSource id="1" name="Quito" tectonicRegion="Active Shallow Crust">
            <areaGeometry>
                <gml:Polygon>
                    <gml:exterior>
                        <gml:LinearRing>
                            <gml:posList>
                             -122.5 37.5
                             -121.5 37.5
                             -121.5 38.5
                             -122.5 38.5
                            </gml:posList>
                        </gml:LinearRing>
                    </gml:exterior>
                </gml:Polygon>
                <upperSeismoDepth>0.0</upperSeismoDepth>
                <lowerSeismoDepth>10.0</lowerSeismoDepth>
            </areaGeometry>
            <magScaleRel>PeerMSR</magScaleRel>
            <ruptAspectRatio>1.5</ruptAspectRatio>
            <incrementalMFD minMag="6.55" binWidth="0.1">
                <occurRates>0.0010614989 8.8291627E-4 7.3437777E-4
                            6.108288E-4 5.080653E-4
                </occurRates>
            </incrementalMFD>
            <nodalPlanedist>
         <nodalPlane probability="0.3" strike="0.0" dip="90.0" rake="0.0" />
         <nodalPlane probability="0.7" strike="90.0" dip="45.0" rake="90.0" />
            </nodalPlanedist>
            <hypoDepthDist>
                <hypoDepth probability="0.5" depth="4.0" />
                <hypoDepth probability="0.5" depth="8.0" />
            </hypoDepthDist>
        </areaSource>

    </sourceModel>
</nrml>
""")
        [area] = read_nodes(area_file, filter_sources, ValidNode)
        with self.assertRaises(NameError) as ctx:
            self.converter.convert_node(area)
        self.assertIn(
            "node areaSource: No subnode named 'nodalPlaneDist'"
            " found in 'areaSource', line 5 of", str(ctx.exception))
Example #24
def get_vulnerability_functions_04(fname):
    """
    Parse the vulnerability model in NRML 0.4 format.

    :param fname:
        path of the vulnerability file
    :returns:
        a dictionary imt, taxonomy -> vulnerability function + vset
    """
    categories = dict(assetCategory=set(),
                      lossCategory=set(),
                      vulnerabilitySetID=set())
    imts = set()
    taxonomies = set()
    vf_dict = {}  # imt, taxonomy -> vulnerability function
    for vset in read_nodes(fname, filter_vset,
                           nodefactory['vulnerabilityModel']):
        categories['assetCategory'].add(vset['assetCategory'])
        categories['lossCategory'].add(vset['lossCategory'])
        categories['vulnerabilitySetID'].add(vset['vulnerabilitySetID'])
        imt_str, imls, min_iml, max_iml, imlUnit = ~vset.IML
        imts.add(imt_str)
        for vfun in vset.getnodes('discreteVulnerability'):
            taxonomy = vfun['vulnerabilityFunctionID']
            if taxonomy in taxonomies:
                raise InvalidFile(
                    'Duplicated vulnerabilityFunctionID: %s: %s, line %d' %
                    (taxonomy, fname, vfun.lineno))
            taxonomies.add(taxonomy)
            with context(fname, vfun):
                loss_ratios = ~vfun.lossRatio
                coefficients = ~vfun.coefficientsVariation
            if len(loss_ratios) != len(imls):
                raise InvalidFile(
                    'There are %d loss ratios, but %d imls: %s, line %d' %
                    (len(loss_ratios), len(imls), fname,
                     vfun.lossRatio.lineno))
            if len(coefficients) != len(imls):
                raise InvalidFile(
                    'There are %d coefficients, but %d imls: %s, line %d' %
                    (len(coefficients), len(imls), fname,
                     vfun.coefficientsVariation.lineno))
            with context(fname, vfun):
                vf_dict[imt_str, taxonomy] = scientific.VulnerabilityFunction(
                    taxonomy, imt_str, imls, loss_ratios, coefficients,
                    vfun['probabilisticDistribution'])
    categories['id'] = '_'.join(sorted(categories['vulnerabilitySetID']))
    del categories['vulnerabilitySetID']
    return vf_dict, categories
Example #25
def get_fragility_functions(fname, continuous_fragility_discretization):
    """
    :param fname:
        path of the fragility file
    :returns:
        damage_states list and dictionary taxonomy -> functions
    """
    [fmodel] = read_nodes(
        fname, lambda el: el.tag.endswith('fragilityModel'),
        nodefactory['fragilityModel'])
    # ~fmodel.description is ignored
    limit_states = ~fmodel.limitStates
    tag = 'ffc' if fmodel['format'] == 'continuous' else 'ffd'
    fragility_functions = AccumDict()  # taxonomy -> functions
    for ffs in fmodel.getnodes('ffs'):
        nodamage = ffs.attrib.get('noDamageLimit')
        taxonomy = ~ffs.taxonomy
        imt_str, imls, min_iml, max_iml, imlUnit = ~ffs.IML
        if continuous_fragility_discretization and not imls:
            imls = numpy.linspace(min_iml, max_iml,
                                  continuous_fragility_discretization + 1)
        fragility_functions[taxonomy] = FragilityFunctionList(
            [], imt=imt_str, imls=imls)
        lstates = []
        for ff in ffs.getnodes(tag):
            ls = ff['ls']  # limit state
            lstates.append(ls)
            if tag == 'ffc':
                with context(fname, ff):
                    mean_stddev = ~ff.params
                fragility_functions[taxonomy].append(
                    scientific.FragilityFunctionContinuous(ls, *mean_stddev))
            else:  # discrete
                with context(fname, ff):
                    poes = ~ff.poEs
                if nodamage is None:
                    fragility_functions[taxonomy].append(
                        scientific.FragilityFunctionDiscrete(
                            ls, imls, poes, imls[0]))
                else:
                    fragility_functions[taxonomy].append(
                        scientific.FragilityFunctionDiscrete(
                            ls, [nodamage] + imls, [0.0] + poes, nodamage))
        if lstates != limit_states:
            raise InvalidFile("Expected limit states %s, got %s in %s" %
                             (limit_states, lstates, fname))

    fragility_functions.damage_states = ['no_damage'] + limit_states
    return fragility_functions
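
A sketch of reading a fragility model; the path and the discretization value are placeholders:

ffs = get_fragility_functions('fragility_model.xml', 20)
print(ffs.damage_states)  # ['no_damage'] followed by the limit states
for taxonomy, functions in ffs.items():
    print(taxonomy, len(functions))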
Example #26
def get_vulnerability_functions_04(fname):
    """
    Parse the vulnerability model in NRML 0.4 format.

    :param fname:
        path of the vulnerability file
    :returns:
        a dictionary imt, taxonomy -> vulnerability function + vset
    """
    categories = dict(assetCategory=set(), lossCategory=set(),
                      vulnerabilitySetID=set())
    imts = set()
    taxonomies = set()
    vf_dict = {}  # imt, taxonomy -> vulnerability function
    for vset in read_nodes(fname, filter_vset,
                           nodefactory['vulnerabilityModel']):
        categories['assetCategory'].add(vset['assetCategory'])
        categories['lossCategory'].add(vset['lossCategory'])
        categories['vulnerabilitySetID'].add(vset['vulnerabilitySetID'])
        imt_str, imls, min_iml, max_iml, imlUnit = ~vset.IML
        imts.add(imt_str)
        for vfun in vset.getnodes('discreteVulnerability'):
            taxonomy = vfun['vulnerabilityFunctionID']
            if taxonomy in taxonomies:
                raise InvalidFile(
                    'Duplicated vulnerabilityFunctionID: %s: %s, line %d' %
                    (taxonomy, fname, vfun.lineno))
            taxonomies.add(taxonomy)
            with context(fname, vfun):
                loss_ratios = ~vfun.lossRatio
                coefficients = ~vfun.coefficientsVariation
            if len(loss_ratios) != len(imls):
                raise InvalidFile(
                    'There are %d loss ratios, but %d imls: %s, line %d' %
                    (len(loss_ratios), len(imls), fname,
                     vfun.lossRatio.lineno))
            if len(coefficients) != len(imls):
                raise InvalidFile(
                    'There are %d coefficients, but %d imls: %s, line %d' %
                    (len(coefficients), len(imls), fname,
                     vfun.coefficientsVariation.lineno))
            with context(fname, vfun):
                vf_dict[imt_str, taxonomy] = scientific.VulnerabilityFunction(
                    taxonomy, imt_str, imls, loss_ratios, coefficients,
                    vfun['probabilisticDistribution'])
    categories['id'] = '_'.join(sorted(categories['vulnerabilitySetID']))
    del categories['vulnerabilitySetID']
    return vf_dict, categories
Example #27
    def setUpClass(cls):
        cls.converter = s.SourceConverter(
            investigation_time=50.,
            rupture_mesh_spacing=1,  # km
            complex_fault_mesh_spacing=1,  # km
            width_of_mfd_bin=1.,  # for Truncated GR MFDs
            area_source_discretization=1.,  # km
        )
        source_nodes = read_nodes(MIXED_SRC_MODEL, filter_sources, ValidNode)
        (cls.area, cls.point, cls.simple, cls.cmplx, cls.char_simple,
         cls.char_complex, cls.char_multi) = map(cls.converter.convert_node,
                                                 source_nodes)
        # the parameters here would typically be specified in the job .ini
        cls.investigation_time = 50.
        cls.rupture_mesh_spacing = 1  # km
        cls.complex_fault_mesh_spacing = 1  # km
        cls.width_of_mfd_bin = 1.  # for Truncated GR MFDs
        cls.area_source_discretization = 1.  # km
Example #28
    def setUpClass(cls):
        cls.converter = s.SourceConverter(
            investigation_time=50.,
            rupture_mesh_spacing=1,  # km
            complex_fault_mesh_spacing=1,  # km
            width_of_mfd_bin=1.,  # for Truncated GR MFDs
            area_source_discretization=1.,  # km
        )
        source_nodes = read_nodes(MIXED_SRC_MODEL, filter_sources, ValidNode)
        (cls.area, cls.point, cls.simple, cls.cmplx, cls.char_simple,
         cls.char_complex, cls.char_multi) = map(
             cls.converter.convert_node, source_nodes)
        # the parameters here would typically be specified in the job .ini
        cls.investigation_time = 50.
        cls.rupture_mesh_spacing = 1  # km
        cls.complex_fault_mesh_spacing = 1  # km
        cls.width_of_mfd_bin = 1.  # for Truncated GR MFDs
        cls.area_source_discretization = 1.  # km
Example #29
def get_vulnerability_functions(fname):
    """
    :param fname:
        path of the vulnerability file
    :returns:
        a dictionary imt, taxonomy -> vulnerability function
    """
    # NB: the vulnerabilitySetID is not a unique ID!
    # it is right to have several vulnerability sets with the same ID
    # the IMTs can also be duplicated and with different levels, each
    # vulnerability function in a set will get its own levels
    imts = set()
    taxonomies = set()
    vf_dict = {}  # imt, taxonomy -> vulnerability function
    for vset in read_nodes(fname, filter_vset,
                           nodefactory['vulnerabilityModel']):
        imt_str, imls, min_iml, max_iml, imlUnit = ~vset.IML
        imts.add(imt_str)
        for vfun in vset.getnodes('discreteVulnerability'):
            taxonomy = vfun['vulnerabilityFunctionID']
            if taxonomy in taxonomies:
                raise InvalidFile(
                    'Duplicated vulnerabilityFunctionID: %s: %s, line %d' %
                    (taxonomy, fname, vfun.lineno))
            taxonomies.add(taxonomy)
            with context(fname, vfun):
                loss_ratios = ~vfun.lossRatio
                coefficients = ~vfun.coefficientsVariation
            if len(loss_ratios) != len(imls):
                raise InvalidFile(
                    'There are %d loss ratios, but %d imls: %s, line %d' %
                    (len(loss_ratios), len(imls), fname,
                     vfun.lossRatio.lineno))
            if len(coefficients) != len(imls):
                raise InvalidFile(
                    'There are %d coefficients, but %d imls: %s, line %d' %
                    (len(coefficients), len(imls), fname,
                     vfun.coefficientsVariation.lineno))
            with context(fname, vfun):
                vf_dict[imt_str, taxonomy] = scientific.VulnerabilityFunction(
                    imt_str, imls, loss_ratios, coefficients,
                    vfun['probabilisticDistribution'])
    return vf_dict
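Example #30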
    def read(self, nrml_file, validate=False,
             simple_fault_spacing=1.0, complex_mesh_spacing=5.0,
             mfd_spacing=0.1):
        """
        Build the source model from a file in NRML format
        """
        self.source_file = nrml_file
        if validate:
            converter = SourceConverter(1.0, simple_fault_spacing,
                                        complex_mesh_spacing,
                                        mfd_spacing,
                                        10.0)
        src_nodes = read_nodes(nrml_file, lambda elem: "Source" in elem.tag,
                               nrml.nodefactory["sourceModel"])
        sources = []
        for no, src_node in enumerate(src_nodes, 1):
            if validate:
                print("Validating Source %s" % src_node.attrib["id"])
                _ = converter.convert_node(src_node)
            sources.append(src_node)
        return SourceModel(sources)
Example #31
def get_source_models(oqparam, source_model_lt, sitecol=None, in_memory=True):
    """
    Build all the source models generated by the logic tree.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param source_model_lt:
        a :class:`openquake.commonlib.logictree.SourceModelLogicTree` instance
    :param in_memory:
        if True, keep in memory the sources, else just collect the TRTs
    :returns:
        an iterator over :class:`openquake.commonlib.source.SourceModel`
        tuples
    """
    converter = sourceconverter.SourceConverter(
        oqparam.investigation_time,
        oqparam.rupture_mesh_spacing,
        oqparam.complex_fault_mesh_spacing,
        oqparam.width_of_mfd_bin,
        oqparam.area_source_discretization)

    # consider only the effective realizations
    rlzs = logictree.get_effective_rlzs(source_model_lt)
    samples_by_lt_path = source_model_lt.samples_by_lt_path()
    for i, rlz in enumerate(rlzs):
        sm = rlz.value  # name of the source model
        smpath = rlz.lt_path
        num_samples = samples_by_lt_path[smpath]
        if num_samples > 1:
            logging.warn('The source path %s was sampled %d times',
                         smpath, num_samples)
        fname = possibly_gunzip(os.path.join(oqparam.base_path, sm))
        if in_memory:
            apply_unc = source_model_lt.make_apply_uncertainties(smpath)
            try:
                trt_models = source.parse_source_model(
                    fname, converter, apply_unc)
            except ValueError as e:
                if str(e) in ('Surface does not conform with Aki & '
                              'Richards convention',
                              'Edges points are not in the right order'):
                    raise InvalidFile('''\
    %s: %s. Probably you are using an obsolete model.
    In that case you can fix the file with the command
    python -m openquake.engine.tools.correct_complex_sources %s
    ''' % (fname, e, fname))
                else:
                    raise
        else:  # just collect the TRT models
            smodel = next(read_nodes(fname, lambda el: 'sourceModel' in el.tag,
                                     source.nodefactory['sourceModel']))
            trt_models = source.TrtModel.collect(smodel)
        trts = [mod.trt for mod in trt_models]
        source_model_lt.tectonic_region_types.update(trts)

        gsim_file = oqparam.inputs.get('gsim_logic_tree')
        if gsim_file:  # check TRTs
            gsim_lt = get_gsim_lt(oqparam, trts)
            for trt_model in trt_models:
                if trt_model.trt not in gsim_lt.values:
                    raise ValueError(
                        "Found in %r a tectonic region type %r inconsistent "
                        "with the ones in %r" % (sm, trt_model.trt, gsim_file))
                trt_model.gsims = gsim_lt.values[trt_model.trt]
        else:
            gsim_lt = logictree.DummyGsimLogicTree()
        weight = rlz.weight / num_samples
        yield source.SourceModel(
            sm, weight, smpath, trt_models, gsim_lt, i, num_samples)
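
The function is a generator; a sketch of driving it, assuming oqparam and source_model_lt were built from the job configuration:

for source_model in get_source_models(oqparam, source_model_lt):
    # each item is a SourceModel tuple: name, weight, path, trt_models, ...
    print(source_model)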
Example #32
def get_source_models(oqparam, gsim_lt, source_model_lt, in_memory=True):
    """
    Build all the source models generated by the logic tree.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param gsim_lt:
        a :class:`openquake.commonlib.logictree.GsimLogicTree` instance
    :param source_model_lt:
        a :class:`openquake.commonlib.logictree.SourceModelLogicTree` instance
    :param in_memory:
        if True, keep in memory the sources, else just collect the TRTs
    :returns:
        an iterator over :class:`openquake.commonlib.source.SourceModel`
        tuples
    """
    converter = sourceconverter.SourceConverter(
        oqparam.investigation_time, oqparam.rupture_mesh_spacing,
        oqparam.complex_fault_mesh_spacing, oqparam.width_of_mfd_bin,
        oqparam.area_source_discretization)
    parser = source.SourceModelParser(converter)

    # consider only the effective realizations
    rlzs = logictree.get_effective_rlzs(source_model_lt)
    samples_by_lt_path = source_model_lt.samples_by_lt_path()
    num_source_models = len(rlzs)
    for i, rlz in enumerate(rlzs):
        sm = rlz.value  # name of the source model
        smpath = rlz.lt_path
        num_samples = samples_by_lt_path[smpath]
        fname = possibly_gunzip(os.path.join(oqparam.base_path, sm))
        if in_memory:
            apply_unc = source_model_lt.make_apply_uncertainties(smpath)
            try:
                trt_models = parser.parse_trt_models(fname, apply_unc)
            except ValueError as e:
                if str(e) in ('Surface does not conform with Aki & '
                              'Richards convention',
                              'Edges points are not in the right order'):
                    raise InvalidFile('''\
    %s: %s. Probably you are using an obsolete model.
    In that case you can fix the file with the command
    python -m openquake.engine.tools.correct_complex_sources %s
    ''' % (fname, e, fname))
                else:
                    raise
        else:  # just collect the TRT models
            smodel = next(
                read_nodes(fname, lambda el: 'sourceModel' in el.tag,
                           source.nodefactory['sourceModel']))
            trt_models = source.TrtModel.collect(smodel)
        trts = [mod.trt for mod in trt_models]
        source_model_lt.tectonic_region_types.update(trts)

        gsim_file = oqparam.inputs.get('gsim_logic_tree')
        if gsim_file:  # check TRTs
            for trt_model in trt_models:
                if trt_model.trt not in gsim_lt.values:
                    raise ValueError(
                        "Found in %r a tectonic region type %r inconsistent "
                        "with the ones in %r" % (sm, trt_model.trt, gsim_file))
        else:
            gsim_lt = logictree.GsimLogicTree.from_(oqparam.gsim)
        weight = rlz.weight / num_samples
        num_gsim_paths = (num_samples if oqparam.number_of_logic_tree_samples
                          else gsim_lt.get_num_paths())
        logging.info('Processed source model %d/%d with %d gsim path(s)',
                     i + 1, num_source_models, num_gsim_paths)
        yield source.SourceModel(sm, weight, smpath, trt_models,
                                 num_gsim_paths, i, num_samples)

    # log if some source file is being used more than once
    for fname, hits in parser.fname_hits.items():
        if hits > 1:
            logging.info('%s has been considered %d times', fname, hits)
Example #33
def get_vulnerability_functions(fname):
    """
    :param fname:
        path of the vulnerability file
    :returns:
        a dictionary imt, taxonomy -> vulnerability function
    """
    # NB: the IMTs can be duplicated and with different levels, each
    # vulnerability function in a set will get its own levels
    imts = set()
    taxonomies = set()
    vf_dict = {}  # imt, taxonomy -> vulnerability function
    node = nrml.read(fname)
    if node['xmlns'] == nrml.NRML05:
        vmodel = node[0]
        for vfun in vmodel.getnodes('vulnerabilityFunction'):
            with context(fname, vfun):
                imt = vfun.imls['imt']
                imls = numpy.array(~vfun.imls)
                taxonomy = vfun['id']
            if taxonomy in taxonomies:
                raise InvalidFile(
                    'Duplicated vulnerabilityFunctionID: %s: %s, line %d' %
                    (taxonomy, fname, vfun.lineno))
            if vfun['dist'] == 'PM':
                loss_ratios, probs = [], []
                for probabilities in vfun[1:]:
                    loss_ratios.append(probabilities['lr'])
                    probs.append(valid.probabilities(~probabilities))
                probs = numpy.array(probs)
                assert probs.shape == (len(loss_ratios), len(imls))
                vf_dict[imt,
                        taxonomy] = (scientific.VulnerabilityFunctionWithPMF(
                            taxonomy, imt, imls, numpy.array(loss_ratios),
                            probs))
            else:
                with context(fname, vfun):
                    loss_ratios = ~vfun.meanLRs
                    coefficients = ~vfun.covLRs
                if len(loss_ratios) != len(imls):
                    raise InvalidFile(
                        'There are %d loss ratios, but %d imls: %s, line %d' %
                        (len(loss_ratios), len(imls), fname,
                         vfun.meanLRs.lineno))
                if len(coefficients) != len(imls):
                    raise InvalidFile(
                        'There are %d coefficients, but %d imls: %s, '
                        'line %d' % (len(coefficients), len(imls), fname,
                                     vfun.covLRs.lineno))
                with context(fname, vfun):
                    vf_dict[imt, taxonomy] = scientific.VulnerabilityFunction(
                        taxonomy, imt, imls, loss_ratios, coefficients,
                        vfun['dist'])
        return vf_dict
    # otherwise, read the old format (NRML 0.4)
    for vset in read_nodes(fname, filter_vset,
                           nodefactory['vulnerabilityModel']):
        imt_str, imls, min_iml, max_iml, imlUnit = ~vset.IML
        imts.add(imt_str)
        for vfun in vset.getnodes('discreteVulnerability'):
            taxonomy = vfun['vulnerabilityFunctionID']
            if taxonomy in taxonomies:
                raise InvalidFile(
                    'Duplicated vulnerabilityFunctionID: %s: %s, line %d' %
                    (taxonomy, fname, vfun.lineno))
            taxonomies.add(taxonomy)
            with context(fname, vfun):
                loss_ratios = ~vfun.lossRatio
                coefficients = ~vfun.coefficientsVariation
            if len(loss_ratios) != len(imls):
                raise InvalidFile(
                    'There are %d loss ratios, but %d imls: %s, line %d' %
                    (len(loss_ratios), len(imls), fname,
                     vfun.lossRatio.lineno))
            if len(coefficients) != len(imls):
                raise InvalidFile(
                    'There are %d coefficients, but %d imls: %s, line %d' %
                    (len(coefficients), len(imls), fname,
                     vfun.coefficientsVariation.lineno))
            with context(fname, vfun):
                vf_dict[imt_str, taxonomy] = scientific.VulnerabilityFunction(
                    taxonomy, imt_str, imls, loss_ratios, coefficients,
                    vfun['probabilisticDistribution'])
    return vf_dict
Example #34
def get_fragility_functions(fname,
                            continuous_fragility_discretization,
                            steps_per_interval=None):
    """
    :param fname:
        path of the fragility file
    :param continuous_fragility_discretization:
        continuous_fragility_discretization parameter
    :param steps_per_interval:
        steps_per_interval parameter
    :returns:
        damage_states list and dictionary taxonomy -> functions
    """
    [fmodel] = read_nodes(fname, lambda el: el.tag.endswith('fragilityModel'),
                          nodefactory['fragilityModel'])
    # ~fmodel.description is ignored
    limit_states = ~fmodel.limitStates
    tag = 'ffc' if fmodel['format'] == 'continuous' else 'ffd'
    fragility_functions = AccumDict()  # taxonomy -> functions
    for ffs in fmodel.getnodes('ffs'):
        add_zero_value = False
        # NB: the noDamageLimit is only defined for discrete fragility
        # functions. It is a way to set the starting point of the functions:
        # if noDamageLimit lies to the left of the IMLs, it means that the
        # function starts at zero at the given point, so we need to add
        # noDamageLimit to the list of IMLs and zero to the list of poes
        nodamage = ffs.attrib.get('noDamageLimit')
        taxonomy = ~ffs.taxonomy
        imt_str, imls, min_iml, max_iml, imlUnit = ~ffs.IML

        if fmodel['format'] == 'discrete':
            if nodamage is not None and nodamage < imls[0]:
                # discrete fragility
                imls = [nodamage] + imls
                add_zero_value = True
            if steps_per_interval:
                gen_imls = scientific.fine_graining(imls, steps_per_interval)
            else:
                gen_imls = imls
        else:  # continuous:
            if min_iml is None:
                raise InvalidFile('Missing attribute minIML, line %d' %
                                  ffs.IML.lineno)
            elif max_iml is None:
                raise InvalidFile('Missing attribute maxIML, line %d' %
                                  ffs.IML.lineno)
            gen_imls = numpy.linspace(min_iml, max_iml,
                                      continuous_fragility_discretization)
        fragility_functions[taxonomy] = scientific.FragilityFunctionList(
            [],
            imt=imt_str,
            imls=list(gen_imls),
            no_damage_limit=nodamage,
            continuous_fragility_discretization=
            continuous_fragility_discretization,
            steps_per_interval=steps_per_interval)
        lstates = []
        for ff in ffs.getnodes(tag):
            ls = ff['ls']  # limit state
            lstates.append(ls)
            if tag == 'ffc':
                with context(fname, ff):
                    mean_stddev = ~ff.params
                fragility_functions[taxonomy].append(
                    scientific.FragilityFunctionContinuous(ls, *mean_stddev))
            else:  # discrete
                with context(fname, ff):
                    poes = ~ff.poEs
                if add_zero_value:
                    poes = [0.] + poes

                fragility_functions[taxonomy].append(
                    scientific.FragilityFunctionDiscrete(
                        ls, imls, poes, nodamage))

        if lstates != limit_states:
            raise InvalidFile("Expected limit states %s, got %s in %s" %
                              (limit_states, lstates, fname))

    fragility_functions.damage_states = ['no_damage'] + limit_states
    return fragility_functions
Example #35
def get_fragility_functions(fname, continuous_fragility_discretization,
                            steps_per_interval=None):
    """
    :param fname:
        path of the fragility file
    :param continuous_fragility_discretization:
        continuous_fragility_discretization parameter
    :param steps_per_interval:
        steps_per_interval parameter
    :returns:
        damage_states list and dictionary taxonomy -> functions
    """
    [fmodel] = read_nodes(
        fname, lambda el: el.tag.endswith('fragilityModel'),
        nodefactory['fragilityModel'])
    # ~fmodel.description is ignored
    limit_states = ~fmodel.limitStates
    tag = 'ffc' if fmodel['format'] == 'continuous' else 'ffd'
    fragility_functions = AccumDict()  # taxonomy -> functions
    for ffs in fmodel.getnodes('ffs'):
        add_zero_value = False
        # NB: the noDamageLimit is only defined for discrete fragility
        # functions. It is a way to set the starting point of the functions:
        # if noDamageLimit lies to the left of the IMLs, it means that the
        # function starts at zero at the given point, so we need to add
        # noDamageLimit to the list of IMLs and zero to the list of poes
        nodamage = ffs.attrib.get('noDamageLimit')
        taxonomy = ~ffs.taxonomy
        imt_str, imls, min_iml, max_iml, imlUnit = ~ffs.IML

        if fmodel['format'] == 'discrete':
            if nodamage is not None and nodamage < imls[0]:
                # discrete fragility
                imls = [nodamage] + imls
                add_zero_value = True
            if steps_per_interval:
                gen_imls = scientific.fine_graining(imls, steps_per_interval)
            else:
                gen_imls = imls
        else:  # continuous:
            if min_iml is None:
                raise InvalidFile(
                    'Missing attribute minIML, line %d' % ffs.IML.lineno)
            elif max_iml is None:
                raise InvalidFile(
                    'Missing attribute maxIML, line %d' % ffs.IML.lineno)
            gen_imls = numpy.linspace(min_iml, max_iml,
                                      continuous_fragility_discretization)
        fragility_functions[taxonomy] = scientific.FragilityFunctionList(
            [], imt=imt_str, imls=list(gen_imls),
            no_damage_limit=nodamage,
            continuous_fragility_discretization=
            continuous_fragility_discretization,
            steps_per_interval=steps_per_interval)
        lstates = []
        for ff in ffs.getnodes(tag):
            ls = ff['ls']  # limit state
            lstates.append(ls)
            if tag == 'ffc':
                with context(fname, ff):
                    mean_stddev = ~ff.params
                fragility_functions[taxonomy].append(
                    scientific.FragilityFunctionContinuous(ls, *mean_stddev))
            else:  # discrete
                with context(fname, ff):
                    poes = ~ff.poEs
                if add_zero_value:
                    poes = [0.] + poes

                fragility_functions[taxonomy].append(
                    scientific.FragilityFunctionDiscrete(
                        ls, imls, poes, nodamage))

        if lstates != limit_states:
            raise InvalidFile("Expected limit states %s, got %s in %s" %
                             (limit_states, lstates, fname))

    fragility_functions.damage_states = ['no_damage'] + limit_states
    return fragility_functions
Example #36
def get_source_models(oqparam, source_model_lt, in_memory=True):
    """
    Build all the source models generated by the logic tree.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param source_model_lt:
        a :class:`openquake.commonlib.logictree.SourceModelLogicTree` instance
    :param in_memory:
        if True, keep in memory the sources, else just collect the TRTs
    :returns:
        an iterator over :class:`openquake.commonlib.source.SourceModel`
        tuples
    """
    converter = sourceconverter.SourceConverter(
        oqparam.investigation_time,
        oqparam.rupture_mesh_spacing,
        oqparam.complex_fault_mesh_spacing,
        oqparam.width_of_mfd_bin,
        oqparam.area_source_discretization,
    )
    parser = source.SourceModelParser(converter)

    # consider only the effective realizations
    rlzs = logictree.get_effective_rlzs(source_model_lt)
    samples_by_lt_path = source_model_lt.samples_by_lt_path()
    num_source_models = len(rlzs)
    for i, rlz in enumerate(rlzs):
        sm = rlz.value  # name of the source model
        smpath = rlz.lt_path
        num_samples = samples_by_lt_path[smpath]
        fname = possibly_gunzip(os.path.join(oqparam.base_path, sm))
        if in_memory:
            apply_unc = source_model_lt.make_apply_uncertainties(smpath)
            try:
                trt_models = parser.parse_trt_models(fname, apply_unc)
            except ValueError as e:
                if str(e) in (
                    "Surface does not conform with Aki & " "Richards convention",
                    "Edges points are not in the right order",
                ):
                    raise InvalidFile(
                        """\
    %s: %s. Probably you are using an obsolete model.
    In that case you can fix the file with the command
    python -m openquake.engine.tools.correct_complex_sources %s
    """
                        % (fname, e, fname)
                    )
                else:
                    raise
        else:  # just collect the TRT models
            smodel = next(read_nodes(fname, lambda el: "sourceModel" in el.tag,
                                     source.nodefactory["sourceModel"]))
            trt_models = source.TrtModel.collect(smodel)
        trts = [mod.trt for mod in trt_models]
        source_model_lt.tectonic_region_types.update(trts)

        gsim_file = oqparam.inputs.get("gsim_logic_tree")
        if gsim_file:  # check TRTs
            gsim_lt = get_gsim_lt(oqparam, trts)
            for trt_model in trt_models:
                if trt_model.trt not in gsim_lt.values:
                    raise ValueError(
                        "Found in %r a tectonic region type %r inconsistent "
                        "with the ones in %r" % (sm, trt_model.trt, gsim_file)
                    )
                trt_model.gsims = gsim_lt.values[trt_model.trt]
        else:
            gsim_lt = logictree.DummyGsimLogicTree()
        weight = rlz.weight / num_samples
        num_gsim_paths = num_samples if oqparam.number_of_logic_tree_samples else gsim_lt.get_num_paths()
        logging.info("Processed source model %d/%d with %d gsim path(s)", i + 1, num_source_models, num_gsim_paths)
        yield source.SourceModel(sm, weight, smpath, trt_models, gsim_lt, i, num_samples)

    # log if some source file is being used more than once
    for fname, hits in parser.fname_hits.items():
        if hits > 1:
            logging.info("%s has been considered %d times", fname, hits)