Example #1
def get_consequence_model(node, fname):
    with context(fname, node):
        description = ~node.description  # make sure it is there
        limitStates = ~node.limitStates  # make sure it is there
        # ASK: is the 'id' mandatory?
        node['assetCategory']  # make sure it is there
        node['lossCategory']  # make sure it is there
        cfs = node[2:]
    functions = {}
    for cf in cfs:
        with context(fname, cf):
            params = []
            if len(limitStates) != len(cf):
                raise ValueError(
                    'Expected %d limit states, got %d' %
                    (len(limitStates), len(cf)))
            for ls, param in zip(limitStates, cf):
                with context(fname, param):
                    if param['ls'] != ls:
                        raise ValueError("Expected '%s', got '%s'" %
                                         (ls, param['ls']))
                    params.append((param['mean'], param['stddev']))
            functions[cf['id']] = scientific.ConsequenceFunction(
                cf['id'], cf['dist'], params)
    attrs = node.attrib.copy()
    attrs.update(description=description, limitStates=limitStates)
    cmodel = scientific.ConsequenceModel(**attrs)
    cmodel.update(functions)
    return cmodel
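All of the examples on this page wrap node access in the context helper, as in "with context(fname, node):". As a rough sketch of the pattern only (assuming the helper's job is to re-raise any exception enriched with the file name and the node's line number; the actual openquake implementation may differ), such a context manager could look like this:

from contextlib import contextmanager

@contextmanager
def context_sketch(fname, node):
    # Illustrative approximation, not the real openquake context helper:
    # re-raise any error with the file name and node line number attached.
    try:
        yield node
    except Exception as exc:
        lineno = getattr(node, 'lineno', '?')
        raise exc.__class__('%s, in %s, line %s' % (exc, fname, lineno))

With a wrapper of this kind, a missing description or limitStates sub-node in get_consequence_model above is reported together with the offending file and line instead of as a bare exception.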
Example #2
def get_vulnerability_functions_04(fname):
    """
    Parse the vulnerability model in NRML 0.4 format.

    :param fname:
        path of the vulnerability file
    :returns:
        a dictionary (imt, taxonomy) -> vulnerability function, plus a
        categories dictionary
    """
    categories = dict(assetCategory=set(), lossCategory=set(),
                      vulnerabilitySetID=set())
    imts = set()
    taxonomies = set()
    vf_dict = {}  # imt, taxonomy -> vulnerability function
    for vset in nrml.read(fname).vulnerabilityModel:
        categories['assetCategory'].add(vset['assetCategory'])
        categories['lossCategory'].add(vset['lossCategory'])
        categories['vulnerabilitySetID'].add(vset['vulnerabilitySetID'])
        IML = vset.IML
        imt_str = IML['IMT']
        imls = ~IML
        imts.add(imt_str)
        for vfun in vset.getnodes('discreteVulnerability'):
            taxonomy = vfun['vulnerabilityFunctionID']
            if taxonomy in taxonomies:
                raise InvalidFile(
                    'Duplicated vulnerabilityFunctionID: %s: %s, line %d' %
                    (taxonomy, fname, vfun.lineno))
            taxonomies.add(taxonomy)
            with context(fname, vfun):
                loss_ratios = ~vfun.lossRatio
                coefficients = ~vfun.coefficientsVariation
            if len(loss_ratios) != len(imls):
                raise InvalidFile(
                    'There are %d loss ratios, but %d imls: %s, line %d' %
                    (len(loss_ratios), len(imls), fname,
                     vfun.lossRatio.lineno))
            if len(coefficients) != len(imls):
                raise InvalidFile(
                    'There are %d coefficients, but %d imls: %s, line %d' %
                    (len(coefficients), len(imls), fname,
                     vfun.coefficientsVariation.lineno))
            with context(fname, vfun):
                vf_dict[imt_str, taxonomy] = scientific.VulnerabilityFunction(
                    taxonomy, imt_str, imls, loss_ratios, coefficients,
                    vfun['probabilisticDistribution'])
    categories['id'] = '_'.join(sorted(categories['vulnerabilitySetID']))
    del categories['vulnerabilitySetID']
    return vf_dict, categories
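A minimal usage sketch for the parser above, assuming an NRML 0.4 vulnerability model stored at the hypothetical path 'vulnerability_model_04.xml':

vf_dict, categories = get_vulnerability_functions_04('vulnerability_model_04.xml')
for (imt, taxonomy), vfunc in vf_dict.items():
    print(imt, taxonomy, vfunc)
print(categories['id'])  # the '_'-joined vulnerabilitySetIDs built at the end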
Example #3
 def get_mag_rake_hypo(self, node):
     with context(self.fname, node):
         mag = ~node.magnitude
         rake = ~node.rake
         h = node.hypocenter
         hypocenter = geo.Point(h['lon'], h['lat'], h['depth'])
     return mag, rake, hypocenter
Example #4
    def convert_complexFaultSource(self, node):
        """
        Convert the given node into a complex fault object.

        :param node: a node with tag complexFaultSource
        :returns: a :class:`openquake.hazardlib.source.ComplexFaultSource`
                  instance
        """
        geom = node.complexFaultGeometry
        edges = self.geo_lines(geom)
        mfd = self.convert_mfdist(node)
        msr = valid.SCALEREL[~node.magScaleRel]()
        with context(self.fname, node):
            cmplx = source.ComplexFaultSource(
                source_id=node['id'],
                name=node['name'],
                tectonic_region_type=node.attrib.get('tectonicRegion'),
                mfd=mfd,
                rupture_mesh_spacing=self.complex_fault_mesh_spacing,
                magnitude_scaling_relationship=msr,
                rupture_aspect_ratio=~node.ruptAspectRatio,
                edges=edges,
                rake=~node.rake,
                temporal_occurrence_model=self.tom)
        return cmplx
Example #5
    def convert_surfaces(self, surface_nodes):
        """
        Utility to convert a list of surface nodes into a single hazardlib
        surface. There are four possibilities:

        1. there is a single simpleFaultGeometry node; returns a
           :class:`openquake.hazardlib.geo.SimpleFaultSurface` instance
        2. there is a single complexFaultGeometry node; returns a
           :class:`openquake.hazardlib.geo.ComplexFaultSurface` instance
        3. there is a single griddedSurface node; returns a
           :class:`openquake.hazardlib.geo.GriddedSurface` instance
        4. there is a list of PlanarSurface nodes; returns a
           :class:`openquake.hazardlib.geo.MultiSurface` instance

        :param surface_nodes: surface nodes as just described
        """
        surface_node = surface_nodes[0]
        if surface_node.tag.endswith('simpleFaultGeometry'):
            surface = geo.SimpleFaultSurface.from_fault_data(
                self.geo_line(surface_node),
                ~surface_node.upperSeismoDepth,
                ~surface_node.lowerSeismoDepth,
                ~surface_node.dip,
                self.rupture_mesh_spacing)
        elif surface_node.tag.endswith('complexFaultGeometry'):
            surface = geo.ComplexFaultSurface.from_fault_data(
                self.geo_lines(surface_node),
                self.complex_fault_mesh_spacing)
        elif surface_node.tag.endswith('griddedSurface'):
            with context(self.fname, surface_node):
                coords = split_coords_3d(~surface_node.posList)
            points = [geo.Point(*p) for p in coords]
            surface = geo.GriddedSurface.from_points_list(points)
        else:  # a collection of planar surfaces
            planar_surfaces = list(map(self.geo_planar, surface_nodes))
            surface = geo.MultiSurface(planar_surfaces)
        return surface
Example #6
def get_vulnerability_functions_04(node, fname):
    """
    :param node:
        a vulnerabilityModel node
    :param fname:
        path to the vulnerability file
    :returns:
        a dictionary imt, taxonomy -> vulnerability function
    """
    # NB: the fname below can contain non-ASCII characters
    logging.warning('Please upgrade %s to NRML 0.5', fname)
    # NB: the IMTs can be duplicated and with different levels, each
    # vulnerability function in a set will get its own levels
    imts = set()
    taxonomies = set()
    # imt, taxonomy -> vulnerability function
    vmodel = scientific.VulnerabilityModel(**node.attrib)
    for vset in node:
        imt_str = vset.IML['IMT']
        imls = ~vset.IML
        imts.add(imt_str)
        for vfun in vset.getnodes('discreteVulnerability'):
            taxonomy = vfun['vulnerabilityFunctionID']
            if taxonomy in taxonomies:
                raise InvalidFile(
                    'Duplicated vulnerabilityFunctionID: %s: %s, line %d' %
                    (taxonomy, fname, vfun.lineno))
            taxonomies.add(taxonomy)
            with context(fname, vfun):
                loss_ratios = ~vfun.lossRatio
                coefficients = ~vfun.coefficientsVariation
            if len(loss_ratios) != len(imls):
                raise InvalidFile(
                    'There are %d loss ratios, but %d imls: %s, line %d' %
                    (len(loss_ratios), len(imls), fname,
                     vfun.lossRatio.lineno))
            if len(coefficients) != len(imls):
                raise InvalidFile(
                    'There are %d coefficients, but %d imls: %s, line %d' %
                    (len(coefficients), len(imls), fname,
                     vfun.coefficientsVariation.lineno))
            with context(fname, vfun):
                vmodel[imt_str, taxonomy] = scientific.VulnerabilityFunction(
                    taxonomy, imt_str, imls, loss_ratios, coefficients,
                    vfun['probabilisticDistribution'])
    return vmodel
Example #7
def get_vulnerability_functions_04(node, fname):
    """
    :param node:
        a vulnerabilityModel node
    :param fname:
        path to the vulnerability file
    :returns:
        a dictionary imt, vf_id -> vulnerability function
    """
    logging.warning('Please upgrade %s to NRML 0.5', fname)
    # NB: the IMTs can be duplicated and with different levels, each
    # vulnerability function in a set will get its own levels
    imts = set()
    vf_ids = set()
    # imt, vf_id -> vulnerability function
    vmodel = scientific.VulnerabilityModel(**node.attrib)
    for vset in node:
        imt_str = vset.IML['IMT']
        imls = ~vset.IML
        imts.add(imt_str)
        for vfun in vset.getnodes('discreteVulnerability'):
            vf_id = vfun['vulnerabilityFunctionID']
            if vf_id in vf_ids:
                raise InvalidFile(
                    'Duplicated vulnerabilityFunctionID: %s: %s, line %d' %
                    (vf_id, fname, vfun.lineno))
            vf_ids.add(vf_id)
            with context(fname, vfun):
                loss_ratios = ~vfun.lossRatio
                coefficients = ~vfun.coefficientsVariation
            if len(loss_ratios) != len(imls):
                raise InvalidFile(
                    'There are %d loss ratios, but %d imls: %s, line %d' %
                    (len(loss_ratios), len(imls), fname,
                     vfun.lossRatio.lineno))
            if len(coefficients) != len(imls):
                raise InvalidFile(
                    'There are %d coefficients, but %d imls: %s, line %d' %
                    (len(coefficients), len(imls), fname,
                     vfun.coefficientsVariation.lineno))
            with context(fname, vfun):
                vmodel[imt_str, vf_id] = scientific.VulnerabilityFunction(
                    vf_id, imt_str, imls, loss_ratios, coefficients,
                    vfun['probabilisticDistribution'])
    return vmodel
Example #8
def get_source_model_05(node, fname, converter):
    converter.fname = fname
    groups = []  # expect a sequence of sourceGroup nodes
    for src_group in node:
        with context(fname, src_group):
            if 'sourceGroup' not in src_group.tag:
                raise ValueError('expected sourceGroup')
        groups.append(converter.convert_node(src_group))
    return sorted(groups)
Example #9
    def convert_sourceGroup(self, node):
        """
        Convert the given node into a SourceGroup object.

        :param node:
            a node with tag sourceGroup
        :returns:
            a :class:`SourceGroup` instance
        """
        trt = node['tectonicRegion']
        srcs_weights = node.attrib.get('srcs_weights')
        grp_attrs = {k: v for k, v in node.attrib.items()
                     if k not in ('name', 'src_interdep', 'rup_interdep',
                                  'srcs_weights')}
        sg = SourceGroup(trt, min_mag=self.minimum_magnitude)
        sg.temporal_occurrence_model = self.get_tom(node)
        sg.name = node.attrib.get('name')
        # set attributes related to occurrence
        sg.src_interdep = node.attrib.get('src_interdep', 'indep')
        sg.rup_interdep = node.attrib.get('rup_interdep', 'indep')
        sg.grp_probability = node.attrib.get('grp_probability')
        # set the cluster attribute
        sg.cluster = node.attrib.get('cluster') == 'true'
        #
        for src_node in node:
            if self.source_id and self.source_id != src_node['id']:
                continue  # filter by source_id
            src = self.convert_node(src_node)
            # transmit the group attributes to the underlying source
            for attr, value in grp_attrs.items():
                if attr == 'tectonicRegion':
                    src_trt = src_node.get('tectonicRegion')
                    if src_trt and src_trt != trt:
                        with context(self.fname, src_node):
                            raise ValueError('Found %s, expected %s' %
                                             (src_node['tectonicRegion'], trt))
                    src.tectonic_region_type = trt
                elif attr == 'grp_probability':
                    pass  # do not transmit
                else:  # transmit as it is
                    setattr(src, attr, node[attr])
            sg.update(src)
        if srcs_weights is not None:
            if len(node) and len(srcs_weights) != len(node):
                raise ValueError(
                    'There are %d srcs_weights but %d source(s) in %s'
                    % (len(srcs_weights), len(node), self.fname))
            for src, sw in zip(sg, srcs_weights):
                src.mutex_weight = sw
        # check that, when the cluster option is set, the group has a temporal
        # occurrence model properly defined
        if sg.cluster and not hasattr(sg, 'temporal_occurrence_model'):
            msg = 'The Source Group is a cluster but does not have a '
            msg += 'temporal occurrence model'
            raise ValueError(msg)
        return sg
Example #10
    def geo_line(self, edge):
        """
        Utility function to convert a node of kind edge
        into a :class:`openquake.hazardlib.geo.Line` instance.

        :param edge: a node describing an edge
        """
        with context(self.fname, edge.LineString.posList) as plist:
            coords = split_coords_2d(~plist)
        return geo.Line([geo.Point(*p) for p in coords])
Example #11
    def geo_line(self, edge):
        """
        Utility function to convert a node of kind edge
        into a :class:`openquake.hazardlib.geo.Line` instance.

        :param edge: a node describing an edge
        """
        with context(self.fname, edge.LineString.posList) as plist:
            coords = split_coords_2d(~plist)
        return geo.Line([geo.Point(*p) for p in coords])
Example #12
    def convert_node(self, node):
        """
        Convert the given node into a hazardlib source, depending
        on the node tag.

        :param node: a node representing a source
        """
        with context(self.fname, node):
            convert_source = getattr(self, 'convert_' + striptag(node.tag))
        return convert_source(node)
Example #13
    def convert_npdist(self, node):
        """
        Convert the given node into a Nodal Plane Distribution.

        :param node: a nodalPlaneDist node
        :returns: a :class:`openquake.hazardlib.pmf.PMF` of
                  :class:`openquake.hazardlib.geo.NodalPlane` objects
        """
        with context(self.fname, node):
            npnode = node.nodalPlaneDist
            npdist = []
            for np in npnode:
                prob, strike, dip, rake = (np['probability'], np['strike'],
                                           np['dip'], np['rake'])
                npdist.append((prob, geo.NodalPlane(strike, dip, rake)))
        with context(self.fname, npnode):
            fix_dupl(npdist, self.fname, npnode.lineno)
            if not self.spinning_floating:
                npdist = [(1, npdist[0][1])]  # consider the first nodal plane
            return pmf.PMF(npdist)
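The converter above reads, from each child of the nodalPlaneDist node, the attributes probability, strike, dip and rake, and builds one (probability, NodalPlane) pair per child. A hypothetical input and the resulting PMF (attribute values invented for illustration):

# Hypothetical nodalPlaneDist children (values invented):
#   nodalPlane probability="0.3" strike="0.0"  dip="90.0" rake="0.0"
#   nodalPlane probability="0.7" strike="90.0" dip="45.0" rake="90.0"
# convert_npdist would then return
#   pmf.PMF([(0.3, geo.NodalPlane(0.0, 90.0, 0.0)),
#            (0.7, geo.NodalPlane(90.0, 45.0, 90.0))])
# unless spinning_floating is off, in which case only the first nodal plane
# is kept with probability 1.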
Example #14
    def convert_hpdist(self, node):
        """
        Convert the given node into a probability mass function for the
        hypo depth distribution.

        :param node: a hypoDepthDist node
        :returns: a :class:`openquake.hazardlib.pmf.PMF` instance
        """
        with context(self.fname, node):
            return pmf.PMF([(hd['probability'], hd['depth'])
                            for hd in node.hypoDepthDist])
Example #15
    def convert_hpdist(self, node):
        """
        Convert the given node into a probability mass function for the
        hypo depth distribution.

        :param node: a hypoDepthDist node
        :returns: a :class:`openquake.hazardlib.pmf.PMF` instance
        """
        with context(self.fname, node):
            return pmf.PMF([(hd['probability'], hd['depth'])
                            for hd in node.hypoDepthDist])
Example #16
 def _read_csv(self, csvnames, dirname):
     """
     :param csvnames: names of csv files, space separated
     :param dirname: the directory where the csv files are
     :yields: asset nodes
     """
     expected_header = self._csv_header()
     fnames = [os.path.join(dirname, f) for f in csvnames.split()]
     for fname in fnames:
         with open(fname, encoding='utf-8') as f:
             fields = next(csv.reader(f))
             header = set(fields)
             if len(header) < len(fields):
                 raise InvalidFile(
                     '%s: The header %s contains a duplicated field' %
                     (fname, header))
             elif expected_header - header:
                 raise InvalidFile(
                     'Unexpected header in %s\nExpected: %s\nGot: %s' %
                     (fname, sorted(expected_header), sorted(header)))
     occupancy_periods = self.occupancy_periods.split()
     for fname in fnames:
         with open(fname, encoding='utf-8') as f:
             for i, dic in enumerate(csv.DictReader(f), 1):
                 asset = Node('asset', lineno=i)
                 with context(fname, asset):
                     asset['id'] = dic['id']
                     asset['number'] = valid.positivefloat(dic['number'])
                     asset['taxonomy'] = dic['taxonomy']
                     if 'area' in dic:  # optional attribute
                         asset['area'] = dic['area']
                     loc = Node(
                         'location',
                         dict(lon=valid.longitude(dic['lon']),
                              lat=valid.latitude(dic['lat'])))
                     costs = Node('costs')
                     for cost in self.cost_types['name']:
                         a = dict(type=cost, value=dic[cost])
                         costs.append(Node('cost', a))
                     occupancies = Node('occupancies')
                     for period in occupancy_periods:
                         a = dict(occupants=float(dic[period]),
                                  period=period)
                         occupancies.append(Node('occupancy', a))
                     tags = Node('tags')
                     for tagname in self.tagcol.tagnames:
                         if tagname != 'taxonomy':
                             tags.attrib[tagname] = dic[tagname]
                     asset.nodes.extend([loc, costs, occupancies, tags])
                     if i % 100000 == 0:
                         logging.info('Read %d assets', i)
                 yield asset
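For orientation, the reader above expects a header row followed by one row per asset, with at least the columns it accesses directly (id, number, taxonomy, lon, lat) plus one column per configured cost type, occupancy period and extra tag. A hypothetical fragment (everything beyond the fixed column names is invented for illustration):

# Hypothetical exposure CSV accepted by _read_csv, assuming one cost type
# 'structural', one occupancy period 'night' and one extra tag 'state':
#
#   id,number,taxonomy,lon,lat,structural,night,state
#   a1,2.0,RC/DMRF-D/LR,9.15000,45.16667,1500,10,CA
#   a2,1.0,W/DMRF-D/HR,9.15333,45.12200,2500,20,CA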
Example #17
    def convert_mfdist(self, node):
        """
        Convert the given node into a Magnitude-Frequency Distribution
        object.

        :param node: a node of kind incrementalMFD or truncGutenbergRichterMFD
        :returns: a :class:`openquake.hazardlib.mfd.EvenlyDiscretizedMFD.` or
                  :class:`openquake.hazardlib.mfd.TruncatedGRMFD` instance
        """
        with context(self.fname, node):
            [mfd_node] = [
                subnode for subnode in node
                if subnode.tag.endswith(('incrementalMFD',
                                         'truncGutenbergRichterMFD',
                                         'arbitraryMFD',
                                         'YoungsCoppersmithMFD', 'multiMFD'))
            ]
            if mfd_node.tag.endswith('incrementalMFD'):
                return mfd.EvenlyDiscretizedMFD(
                    min_mag=mfd_node['minMag'],
                    bin_width=mfd_node['binWidth'],
                    occurrence_rates=~mfd_node.occurRates)
            elif mfd_node.tag.endswith('truncGutenbergRichterMFD'):
                return mfd.TruncatedGRMFD(a_val=mfd_node['aValue'],
                                          b_val=mfd_node['bValue'],
                                          min_mag=mfd_node['minMag'],
                                          max_mag=mfd_node['maxMag'],
                                          bin_width=self.width_of_mfd_bin)
            elif mfd_node.tag.endswith('arbitraryMFD'):
                return mfd.ArbitraryMFD(magnitudes=~mfd_node.magnitudes,
                                        occurrence_rates=~mfd_node.occurRates)
            elif mfd_node.tag.endswith('YoungsCoppersmithMFD'):
                if "totalMomentRate" in mfd_node.attrib.keys():
                    # Return Youngs & Coppersmith from the total moment rate
                    return mfd.YoungsCoppersmith1985MFD.from_total_moment_rate(
                        min_mag=mfd_node["minMag"],
                        b_val=mfd_node["bValue"],
                        char_mag=mfd_node["characteristicMag"],
                        total_moment_rate=mfd_node["totalMomentRate"],
                        bin_width=mfd_node["binWidth"])
                elif "characteristicRate" in mfd_node.attrib.keys():
                    # Return Youngs & Coppersmith from the characteristic rate
                    return mfd.YoungsCoppersmith1985MFD.\
                        from_characteristic_rate(
                            min_mag=mfd_node["minMag"],
                            b_val=mfd_node["bValue"],
                            char_mag=mfd_node["characteristicMag"],
                            char_rate=mfd_node["characteristicRate"],
                            bin_width=mfd_node["binWidth"])
            elif mfd_node.tag.endswith('multiMFD'):
                return mfd.multi_mfd.MultiMFD.from_node(
                    mfd_node, self.width_of_mfd_bin)
Example #18
    def geo_lines(self, edges):
        """
        Utility function to convert a list of edges into a list of
        :class:`openquake.hazardlib.geo.Line` instances.

        :param edges: a list of nodes describing edges
        """
        lines = []
        for edge in edges:
            with context(self.fname, edge):
                coords = split_coords_3d(~edge.LineString.posList)
            lines.append(geo.Line([geo.Point(*p) for p in coords]))
        return lines
Example #19
    def geo_lines(self, edges):
        """
        Utility function to convert a list of edges into a list of
        :class:`openquake.hazardlib.geo.Line` instances.

        :param edges: a list of nodes describing edges
        """
        lines = []
        for edge in edges:
            with context(self.fname, edge):
                coords = split_coords_3d(~edge.LineString.posList)
            lines.append(geo.Line([geo.Point(*p) for p in coords]))
        return lines
Example #20
    def convert_hpdist(self, node):
        """
        Convert the given node into a probability mass function for the
        hypo depth distribution.

        :param node: a hypoDepthDist node
        :returns: a :class:`openquake.hazardlib.pmf.PMF` instance
        """
        with context(self.fname, node):
            hcdist = [(hd['probability'], hd['depth'])
                      for hd in node.hypoDepthDist]
            if not self.spinning_floating:  # consider the first hypocenter
                hcdist = [(1, hcdist[0][1])]
            return pmf.PMF(hcdist)
Example #21
    def convert_npdist(self, node):
        """
        Convert the given node into a Nodal Plane Distribution.

        :param node: a nodalPlaneDist node
        :returns: a :class:`openquake.hazardlib.pmf.PMF` of
                  :class:`openquake.hazardlib.geo.NodalPlane` objects
        """
        with context(self.fname, node):
            npdist = []
            for np in node.nodalPlaneDist:
                prob, strike, dip, rake = (
                    np['probability'], np['strike'], np['dip'], np['rake'])
                npdist.append((prob, geo.NodalPlane(strike, dip, rake)))
            return pmf.PMF(npdist)
Example #22
    def convert_hpdist(self, node):
        """
        Convert the given node into a probability mass function for the
        hypo depth distribution.

        :param node: a hypoDepthDist node
        :returns: a :class:`openquake.hazardlib.pmf.PMF` instance
        """
        with context(self.fname, node):
            hcdist = [(hd['probability'], hd['depth'])
                      for hd in node.hypoDepthDist]
            if os.environ.get('OQ_FLOATING') == 'no':
                hcdist = [(1, hcdist[0][1])]
            return pmf.PMF(hcdist)
Example #23
 def _read_csv(self):
     """
     :yields: asset nodes
     """
     expected_header = self._csv_header()
     for fname in self.datafiles:
         with open(fname, encoding='utf-8') as f:
             fields = next(csv.reader(f))
             header = set(fields)
             if len(header) < len(fields):
                 raise InvalidFile(
                     '%s: The header %s contains a duplicated field' %
                     (fname, header))
             elif expected_header - header - {'exposure', 'country'}:
                 raise InvalidFile(
                     'Unexpected header in %s\nExpected: %s\nGot: %s' %
                     (fname, sorted(expected_header), sorted(header)))
     occupancy_periods = self.occupancy_periods.split()
     for fname in self.datafiles:
         with open(fname, encoding='utf-8') as f:
             for i, dic in enumerate(csv.DictReader(f), 1):
                 asset = Node('asset', lineno=i)
                 with context(fname, asset):
                     asset['id'] = dic['id']
                     asset['number'] = valid.positivefloat(dic['number'])
                     asset['taxonomy'] = dic['taxonomy']
                     if 'area' in dic:  # optional attribute
                         asset['area'] = dic['area']
                     loc = Node(
                         'location',
                         dict(lon=valid.longitude(dic['lon']),
                              lat=valid.latitude(dic['lat'])))
                     costs = Node('costs')
                     for cost in self.cost_types['name']:
                         a = dict(type=cost, value=dic[cost])
                         if 'retrofitted' in dic:
                             a['retrofitted'] = dic['retrofitted']
                         costs.append(Node('cost', a))
                     occupancies = Node('occupancies')
                     for period in occupancy_periods:
                         a = dict(occupants=float(dic[period]),
                                  period=period)
                         occupancies.append(Node('occupancy', a))
                     tags = Node('tags')
                     for tagname in self.tagcol.tagnames:
                         if tagname not in ('taxonomy', 'exposure',
                                            'country'):
                             tags.attrib[tagname] = dic[tagname]
                     asset.nodes.extend([loc, costs, occupancies, tags])
                 yield asset
Example #24
    def convert_hpdist(self, node):
        """
        Convert the given node into a probability mass function for the
        hypo depth distribution.

        :param node: a hypoDepthDist node
        :returns: a :class:`openquake.hazardlib.pmf.PMF` instance
        """
        with context(self.fname, node):
            hcdist = [(hd['probability'], hd['depth'])
                      for hd in node.hypoDepthDist]
            if not self.spinning_floating:  # consider the first hypocenter
                hcdist = [(1, hcdist[0][1])]
            return pmf.PMF(hcdist)
Example #25
    def convert_complexFaultRupture(self, node):
        """
        Convert a complexFaultRupture node.

        :param node: the rupture node
        """
        mag, rake, hypocenter = self.get_mag_rake_hypo(node)
        with context(self.fname, node):
            surfaces = [node.complexFaultGeometry]
        rupt = source.rupture.BaseRupture(
            mag=mag, rake=rake, tectonic_region_type=None,
            hypocenter=hypocenter,
            surface=self.convert_surfaces(surfaces))
        return rupt
Example #26
    def convert_complexFaultRupture(self, node):
        """
        Convert a complexFaultRupture node.

        :param node: the rupture node
        """
        mag, rake, hypocenter = self.get_mag_rake_hypo(node)
        with context(self.fname, node):
            surfaces = [node.complexFaultGeometry]
        rupt = source.rupture.BaseRupture(
            mag=mag, rake=rake, tectonic_region_type=None,
            hypocenter=hypocenter,
            surface=self.convert_surfaces(surfaces))
        return rupt
Example #27
    def convert_npdist(self, node):
        """
        Convert the given node into a Nodal Plane Distribution.

        :param node: a nodalPlaneDist node
        :returns: a :class:`openquake.hazardlib.pmf.PMF` of
                  :class:`openquake.hazardlib.geo.NodalPlane` objects
        """
        with context(self.fname, node):
            npdist = []
            for np in node.nodalPlaneDist:
                prob, strike, dip, rake = (np['probability'], np['strike'],
                                           np['dip'], np['rake'])
                npdist.append((prob, geo.NodalPlane(strike, dip, rake)))
            return pmf.PMF(npdist)
Example #28
 def __init__(self, branch, fname):
     with context(fname, branch.uncertaintyWeight):
         nodes = list(branch.getnodes('uncertaintyWeight'))
         if 'imt' in nodes[0].attrib:
             raise InvalidLogicTree('The first uncertaintyWeight has an imt'
                                    ' attribute')
         self.dic = {'weight': float(nodes[0].text)}
         imts = []
         for n in nodes[1:]:
             self.dic[n['imt']] = float(n.text)
             imts.append(n['imt'])
         if len(set(imts)) < len(imts):
             raise InvalidLogicTree(
                 'There are duplicated IMTs in the weights')
Example #29
    def convert_node(self, node):
        """
        Convert the given rupture node into a hazardlib rupture, depending
        on the node tag.

        :param node: a node representing a rupture
        """
        with context(self.fname, node):
            convert_rupture = getattr(self, 'convert_' + striptag(node.tag))
            mag = ~node.magnitude
            rake = ~node.rake
            h = node.hypocenter
            hypocenter = geo.Point(h['lon'], h['lat'], h['depth'])
        return convert_rupture(node, mag, rake, hypocenter)
Example #30
def convert_fragility_model_04(node, fname, fmcounter=itertools.count(1)):
    """
    :param node:
        an :class:`openquake.commonlib.node.Node` in NRML 0.4
    :param fname:
        path of the fragility file
    :returns:
        an :class:`openquake.commonlib.node.Node` in NRML 0.5
    """
    convert_type = {"lognormal": "logncdf"}
    new = Node(
        'fragilityModel',
        dict(assetCategory='building',
             lossCategory='structural',
             id='fm_%d_converted_from_NRML_04' % next(fmcounter)))
    with context(fname, node):
        fmt = node['format']
        descr = ~node.description
        limit_states = ~node.limitStates
    new.append(Node('description', {}, descr))
    new.append(Node('limitStates', {}, ' '.join(limit_states)))
    for ffs in node[2:]:
        IML = ffs.IML
        # NB: noDamageLimit = None is different than zero
        nodamage = ffs.attrib.get('noDamageLimit')
        ff = Node('fragilityFunction', {'format': fmt})
        ff['id'] = ~ffs.taxonomy
        ff['shape'] = convert_type[ffs.attrib.get('type', 'lognormal')]
        if fmt == 'continuous':
            with context(fname, IML):
                attr = dict(imt=IML['IMT'],
                            minIML=IML['minIML'],
                            maxIML=IML['maxIML'])
                if nodamage is not None:
                    attr['noDamageLimit'] = nodamage
                ff.append(Node('imls', attr))
            for ffc in ffs[2:]:
                with context(fname, ffc):
                    ls = ffc['ls']
                    param = ffc.params
                with context(fname, param):
                    m, s = param['mean'], param['stddev']
                ff.append(Node('params', dict(ls=ls, mean=m, stddev=s)))
        else:  # discrete
            with context(fname, IML):
                imls = ' '.join(map(str, (~IML)[1]))
                attr = dict(imt=IML['IMT'])
            if nodamage is not None:
                attr['noDamageLimit'] = nodamage
            ff.append(Node('imls', attr, imls))
            for ffd in ffs[2:]:
                ls = ffd['ls']
                with context(fname, ffd):
                    poes = ' '.join(map(str, ~ffd.poEs))
                ff.append(Node('poes', dict(ls=ls), poes))
        new.append(ff)
    return new
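A minimal usage sketch for the converter above (assumption: the parsed NRML 0.4 root node exposes the fragility model as a fragilityModel child, by analogy with the vulnerabilityModel examples; the path is hypothetical):

fname = 'fragility_model_04.xml'
fm04 = nrml.read(fname).fragilityModel
fm05 = convert_fragility_model_04(fm04, fname)
print(fm05['id'])  # e.g. 'fm_1_converted_from_NRML_04'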
Example #31
def convert_fragility_model_04(node, fname, fmcounter=itertools.count(1)):
    """
    :param node:
        an :class:`openquake.commonlib.node.Node` in NRML 0.4
    :param fname:
        path of the fragility file
    :returns:
        an :class:`openquake.commonlib.node.Node` in NRML 0.5
    """
    convert_type = {"lognormal": "logncdf"}
    new = Node('fragilityModel',
               dict(assetCategory='building',
                    lossCategory='structural',
                    id='fm_%d_converted_from_NRML_04' %
                    next(fmcounter)))
    with context(fname, node):
        fmt = node['format']
        descr = ~node.description
        limit_states = ~node.limitStates
    new.append(Node('description', {}, descr))
    new.append(Node('limitStates', {}, ' '.join(limit_states)))
    for ffs in node[2:]:
        IML = ffs.IML
        # NB: noDamageLimit = None is different than zero
        nodamage = ffs.attrib.get('noDamageLimit')
        ff = Node('fragilityFunction', {'format': fmt})
        ff['id'] = ~ffs.taxonomy
        ff['shape'] = convert_type[ffs.attrib.get('type', 'lognormal')]
        if fmt == 'continuous':
            with context(fname, IML):
                attr = dict(imt=IML['IMT'],
                            minIML=IML['minIML'],
                            maxIML=IML['maxIML'])
                if nodamage is not None:
                    attr['noDamageLimit'] = nodamage
                ff.append(Node('imls', attr))
            for ffc in ffs[2:]:
                with context(fname, ffc):
                    ls = ffc['ls']
                    param = ffc.params
                with context(fname, param):
                    m, s = param['mean'], param['stddev']
                ff.append(Node('params', dict(ls=ls, mean=m, stddev=s)))
        else:  # discrete
            with context(fname, IML):
                imls = ' '.join(map(str, (~IML)[1]))
                attr = dict(imt=IML['IMT'])
            if nodamage is not None:
                attr['noDamageLimit'] = nodamage
            ff.append(Node('imls', attr, imls))
            for ffd in ffs[2:]:
                ls = ffd['ls']
                with context(fname, ffd):
                    poes = ' '.join(map(str, ~ffd.poEs))
                ff.append(Node('poes', dict(ls=ls), poes))
        new.append(ff)
    return new
Example #32
 def _read_csv(self):
     """
     :yields: asset nodes
     """
     expected_header = self._csv_header()
     for fname in self.datafiles:
         with open(fname, encoding='utf-8') as f:
             fields = next(csv.reader(f))
             header = set(fields)
             if len(header) < len(fields):
                 raise InvalidFile(
                     '%s: The header %s contains a duplicated field' %
                     (fname, header))
             elif expected_header - header - {'exposure', 'country'}:
                 raise InvalidFile(
                     'Unexpected header in %s\nExpected: %s\nGot: %s' %
                     (fname, sorted(expected_header), sorted(header)))
     occupancy_periods = self.occupancy_periods.split()
     for fname in self.datafiles:
         with open(fname, encoding='utf-8') as f:
             for i, dic in enumerate(csv.DictReader(f), 1):
                 asset = Node('asset', lineno=i)
                 with context(fname, asset):
                     asset['id'] = dic['id']
                     asset['number'] = valid.positivefloat(dic['number'])
                     asset['taxonomy'] = dic['taxonomy']
                     if 'area' in dic:  # optional attribute
                         asset['area'] = dic['area']
                     loc = Node('location',
                                dict(lon=valid.longitude(dic['lon']),
                                     lat=valid.latitude(dic['lat'])))
                     costs = Node('costs')
                     for cost in self.cost_types['name']:
                         a = dict(type=cost, value=dic[cost])
                         if 'retrofitted' in dic:
                             a['retrofitted'] = dic['retrofitted']
                         costs.append(Node('cost', a))
                     occupancies = Node('occupancies')
                     for period in occupancy_periods:
                         a = dict(occupants=float(dic[period]),
                                  period=period)
                         occupancies.append(Node('occupancy', a))
                     tags = Node('tags')
                     for tagname in self.tagcol.tagnames:
                         if tagname not in (
                                 'taxonomy', 'exposure', 'country'):
                             tags.attrib[tagname] = dic[tagname]
                     asset.nodes.extend([loc, costs, occupancies, tags])
                 yield asset
Example #33
    def convert_multiPlanesRupture(self, node):
        """
        Convert a multiPlanesRupture node.

        :param node: the rupture node
        """
        mag, rake, hypocenter = self.get_mag_rake_hypo(node)
        with context(self.fname, node):
            surfaces = list(node.getnodes('planarSurface'))
        rupt = source.rupture.BaseRupture(
            mag=mag, rake=rake,
            tectonic_region_type=None,
            hypocenter=hypocenter,
            surface=self.convert_surfaces(surfaces))
        return rupt
Example #34
    def convert_multiPlanesRupture(self, node):
        """
        Convert a multiPlanesRupture node.

        :param node: the rupture node
        """
        mag, rake, hypocenter = self.get_mag_rake_hypo(node)
        with context(self.fname, node):
            surfaces = list(node.getnodes('planarSurface'))
        rupt = source.rupture.BaseRupture(
            mag=mag, rake=rake,
            tectonic_region_type=None,
            hypocenter=hypocenter,
            surface=self.convert_surfaces(surfaces))
        return rupt
Example #35
    def convert_mfdist(self, node):
        """
        Convert the given node into a Magnitude-Frequency Distribution
        object.

        :param node: a node of kind incrementalMFD or truncGutenbergRichterMFD
        :returns: a :class:`openquake.hazardlib.mfd.EvenlyDiscretizedMFD.` or
                  :class:`openquake.hazardlib.mfd.TruncatedGRMFD` instance
        """
        with context(self.fname, node):
            [mfd_node] = [subnode for subnode in node
                          if subnode.tag.endswith(
                              ('incrementalMFD', 'truncGutenbergRichterMFD',
                               'arbitraryMFD', 'YoungsCoppersmithMFD',
                               'multiMFD'))]
            if mfd_node.tag.endswith('incrementalMFD'):
                return mfd.EvenlyDiscretizedMFD(
                    min_mag=mfd_node['minMag'], bin_width=mfd_node['binWidth'],
                    occurrence_rates=~mfd_node.occurRates)
            elif mfd_node.tag.endswith('truncGutenbergRichterMFD'):
                return mfd.TruncatedGRMFD(
                    a_val=mfd_node['aValue'], b_val=mfd_node['bValue'],
                    min_mag=mfd_node['minMag'], max_mag=mfd_node['maxMag'],
                    bin_width=self.width_of_mfd_bin)
            elif mfd_node.tag.endswith('arbitraryMFD'):
                return mfd.ArbitraryMFD(
                    magnitudes=~mfd_node.magnitudes,
                    occurrence_rates=~mfd_node.occurRates)
            elif mfd_node.tag.endswith('YoungsCoppersmithMFD'):
                if "totalMomentRate" in mfd_node.attrib.keys():
                    # Return Youngs & Coppersmith from the total moment rate
                    return mfd.YoungsCoppersmith1985MFD.from_total_moment_rate(
                        min_mag=mfd_node["minMag"], b_val=mfd_node["bValue"],
                        char_mag=mfd_node["characteristicMag"],
                        total_moment_rate=mfd_node["totalMomentRate"],
                        bin_width=mfd_node["binWidth"])
                elif "characteristicRate" in mfd_node.attrib.keys():
                    # Return Youngs & Coppersmith from the characteristic rate
                    return mfd.YoungsCoppersmith1985MFD.\
                        from_characteristic_rate(
                            min_mag=mfd_node["minMag"],
                            b_val=mfd_node["bValue"],
                            char_mag=mfd_node["characteristicMag"],
                            char_rate=mfd_node["characteristicRate"],
                            bin_width=mfd_node["binWidth"])
            elif mfd_node.tag.endswith('multiMFD'):
                return mfd.multi_mfd.MultiMFD.from_node(
                    mfd_node, self.width_of_mfd_bin)
Example #36
    def convert_sourceGroup(self, node):
        """
        Convert the given node into a SourceGroup object.

        :param node:
            a node with tag sourceGroup
        :returns:
            a :class:`SourceGroup` instance
        """
        trt = node['tectonicRegion']
        srcs_weights = node.attrib.get('srcs_weights')
        grp_attrs = {
            k: v
            for k, v in node.attrib.items()
            if k not in ('name', 'src_interdep', 'rup_interdep',
                         'srcs_weights')
        }
        sg = SourceGroup(trt)
        sg.name = node.attrib.get('name')
        sg.src_interdep = node.attrib.get('src_interdep', 'indep')
        sg.rup_interdep = node.attrib.get('rup_interdep', 'indep')
        sg.grp_probability = node.attrib.get('grp_probability')
        for src_node in node:
            if self.source_id and self.source_id != src_node['id']:
                continue  # filter by source_id
            src = self.convert_node(src_node)
            # transmit the group attributes to the underlying source
            for attr, value in grp_attrs.items():
                if attr == 'tectonicRegion':
                    src_trt = src_node.get('tectonicRegion')
                    if src_trt and src_trt != trt:
                        with context(self.fname, src_node):
                            raise ValueError('Found %s, expected %s' %
                                             (src_node['tectonicRegion'], trt))
                    src.tectonic_region_type = value
                elif attr == 'grp_probability':
                    pass  # do not transmit
                else:  # transmit as it is
                    setattr(src, attr, node[attr])
            sg.update(src)
        if srcs_weights is not None:
            if len(node) and len(srcs_weights) != len(node):
                raise ValueError(
                    'There are %d srcs_weights but %d source(s) in %s' %
                    (len(srcs_weights), len(node), self.fname))
            for src, sw in zip(sg, srcs_weights):
                src.mutex_weight = sw
        return sg
Example #37
    def convert_npdist(self, node):
        """
        Convert the given node into a Nodal Plane Distribution.

        :param node: a nodalPlaneDist node
        :returns: a :class:`openquake.hazardlib.pmf.PMF` of
                  :class:`openquake.hazardlib.geo.NodalPlane` objects
        """
        with context(self.fname, node):
            npdist = []
            for np in node.nodalPlaneDist:
                prob, strike, dip, rake = (np['probability'], np['strike'],
                                           np['dip'], np['rake'])
                npdist.append((prob, geo.NodalPlane(strike, dip, rake)))
            if os.environ.get('OQ_SPINNING') == 'no':
                npdist = [(1, npdist[0][1])]  # consider the first nodal plane
            return pmf.PMF(npdist)
Example #38
    def convert_griddedRupture(self, node):
        """
        Convert a griddedRupture node.

        :param node: the rupture node
        """
        mag, rake, hypocenter = self.get_mag_rake_hypo(node)
        with context(self.fname, node):
            surfaces = [node.griddedSurface]
        rupt = source.rupture.BaseRupture(
            mag=mag, rake=rake,
            tectonic_region_type=None,
            hypocenter=hypocenter,
            surface=self.convert_surfaces(surfaces),
            source_typology=source.NonParametricSeismicSource)
        return rupt
Example #39
    def convert_griddedRupture(self, node):
        """
        Convert a griddedRupture node.

        :param node: the rupture node
        """
        mag, rake, hypocenter = self.get_mag_rake_hypo(node)
        with context(self.fname, node):
            surfaces = [node.griddedSurface]
        rupt = source.rupture.BaseRupture(
            mag=mag,
            rake=rake,
            tectonic_region_type=None,
            hypocenter=hypocenter,
            surface=self.convert_surfaces(surfaces),
            source_typology=source.NonParametricSeismicSource)
        return rupt
Example #40
    def convert_simpleFaultRupture(self, node, mag, rake, hypocenter):
        """
        Convert a simpleFaultRupture node.

        :param node: the rupture node
        :param mag: the rupture magnitude
        :param rake: the rupture rake angle
        :param hypocenter: the rupture hypocenter
        """
        with context(self.fname, node):
            surfaces = [node.simpleFaultGeometry]
        rupt = source.rupture.Rupture(mag=mag,
                                      rake=rake,
                                      tectonic_region_type=None,
                                      hypocenter=hypocenter,
                                      surface=self.convert_surfaces(surfaces),
                                      source_typology=source.SimpleFaultSource)
        return rupt
Example #41
    def convert_multiPlanesRupture(self, node, mag, rake, hypocenter):
        """
        Convert a multiPlanesRupture node.

        :param node: the rupture node
        :param mag: the rupture magnitude
        :param rake: the rupture rake angle
        :param hypocenter: the rupture hypocenter
        """
        with context(self.fname, node):
            surfaces = list(node.getnodes('planarSurface'))
        rupt = source.rupture.Rupture(
            mag=mag,
            rake=rake,
            tectonic_region_type=None,
            hypocenter=hypocenter,
            surface=self.convert_surfaces(surfaces),
            source_typology=source.NonParametricSeismicSource)
        return rupt
Example #42
    def geo_planar(self, surface):
        """
        Utility to convert a PlanarSurface node with subnodes
        topLeft, topRight, bottomLeft, bottomRight into a
        :class:`openquake.hazardlib.geo.PlanarSurface` instance.

        :param surface: PlanarSurface node
        """
        with context(self.fname, surface):
            tl = surface.topLeft
            top_left = geo.Point(tl['lon'], tl['lat'], tl['depth'])
            tr = surface.topRight
            top_right = geo.Point(tr['lon'], tr['lat'], tr['depth'])
            bl = surface.bottomLeft
            bottom_left = geo.Point(bl['lon'], bl['lat'], bl['depth'])
            br = surface.bottomRight
            bottom_right = geo.Point(br['lon'], br['lat'], br['depth'])
        return geo.PlanarSurface.from_corner_points(
            top_left, top_right, bottom_right, bottom_left)
Example #43
    def geo_planar(self, surface):
        """
        Utility to convert a PlanarSurface node with subnodes
        topLeft, topRight, bottomLeft, bottomRight into a
        :class:`openquake.hazardlib.geo.PlanarSurface` instance.

        :param surface: PlanarSurface node
        """
        with context(self.fname, surface):
            tl = surface.topLeft
            top_left = geo.Point(tl['lon'], tl['lat'], tl['depth'])
            tr = surface.topRight
            top_right = geo.Point(tr['lon'], tr['lat'], tr['depth'])
            bl = surface.bottomLeft
            bottom_left = geo.Point(bl['lon'], bl['lat'], bl['depth'])
            br = surface.bottomRight
            bottom_right = geo.Point(br['lon'], br['lat'], br['depth'])
        return geo.PlanarSurface.from_corner_points(top_left, top_right,
                                                    bottom_right, bottom_left)
Example #44
    def convert_griddedRupture(self, node, mag, rake, hypocenter):
        """
        Convert a griddedRupture node.

        :param node: the rupture node
        :param mag: the rupture magnitude
        :param rake: the rupture rake angle
        :param hypocenter: the rupture hypocenter
        """
        with context(self.fname, node):
            surfaces = [node.griddedSurface]
        rupt = source.rupture.BaseRupture(
            mag=mag,
            rake=rake,
            tectonic_region_type=None,
            hypocenter=hypocenter,
            surface=self.convert_surfaces(surfaces),
            source_typology=source.NonParametricSeismicSource)
        return rupt
Example #45
 def convert_ruptureCollection(self, node):
     """
     :param node: a ruptureCollection node
     :returns: a dictionary grp_id -> EBRuptures
     """
     coll = {}
     for grpnode in node:
         grp_id = int(grpnode['id'])
         coll[grp_id] = ebrs = []
         for node in grpnode:
             rup = self.convert_node(node)
             rup.serial = int(node['id'])
             sesnodes = node.stochasticEventSets
             n = 0  # number of events
             for sesnode in sesnodes:
                 with context(self.fname, sesnode):
                     n += len(sesnode.text.split())
             ebr = source.rupture.EBRupture(rup, 0, 0, numpy.array([n]))
             ebrs.append(ebr)
     return coll
Example #46
def collect_files(gsim_lt_path):
    """
    Given a path to a gsim logic tree, collect all of the
    path names it contains (relevant for tabular/file-dependent GSIMs).
    """
    n = nrml.read(gsim_lt_path)
    try:
        blevels = n.logicTree
    except Exception:
        raise InvalidFile('%s is not a valid source_model_logic_tree_file' %
                          gsim_lt_path)
    paths = set()
    for blevel in blevels:
        for bset in bsnodes(gsim_lt_path, blevel):
            assert bset['uncertaintyType'] == 'gmpeModel', bset
            for br in bset:
                with context(gsim_lt_path, br):
                    relpaths = rel_paths(br.uncertaintyModel.text)
                    paths.update(abs_paths(gsim_lt_path, relpaths))
    return sorted(paths)
Example #47
 def convert_ruptureCollection(self, node):
     """
     :param node: a ruptureCollection node
     :returns: a dictionary grp_id -> EBRuptures
     """
     coll = {}
     for grpnode in node:
         grp_id = int(grpnode['id'])
         coll[grp_id] = ebrs = []
         for node in grpnode:
             rup = self.convert_node(node)
             rup.rup_id = int(node['id'])
             sesnodes = node.stochasticEventSets
             n = 0  # number of events
             for sesnode in sesnodes:
                 with context(self.fname, sesnode):
                     n += len(sesnode.text.split())
             ebr = source.rupture.EBRupture(rup, 0, 0, numpy.array([n]))
             ebrs.append(ebr)
     return coll
Example #48
    def convert_sourceGroup(self, node):
        """
        Convert the given node into a SourceGroup object.

        :param node:
            a node with tag sourceGroup
        :returns:
            a :class:`SourceGroup` instance
        """
        trt = node['tectonicRegion']
        srcs_weights = node.attrib.get('srcs_weights')
        grp_probability = node.attrib.get('grp_probability')
        grp_attrs = {
            k: v
            for k, v in node.attrib.items()
            if k not in ('name', 'src_interdep', 'rup_interdep',
                         'srcs_weights')
        }
        sg = SourceGroup(trt)
        for src_node in node:
            with context(self.fname, src_node):
                src = self.convert_node(src_node)
                # transmit the group attributes to the underlying source
                for attr, value in grp_attrs.items():
                    if attr == 'tectonicRegion':
                        src.tectonic_region_type = value
                    elif attr == 'grp_probability':
                        pass  # do not transmit
                    else:  # transmit as it is
                        setattr(src, attr, node[attr])
                sg.update(src)
        if srcs_weights is not None:
            if len(srcs_weights) != len(node):
                raise ValueError('There are %d srcs_weights but %d source(s)' %
                                 (len(srcs_weights), len(node)))
        sg.name = node.attrib.get('name')
        sg.src_interdep = node.attrib.get('src_interdep')
        sg.rup_interdep = node.attrib.get('rup_interdep')
        sg._srcs_weights = srcs_weights
        sg.grp_probability = grp_probability
        return sg
Example #49
def collect_source_model_paths(smlt):
    """
    Given a path to a source model logic tree or a file-like object, collect
    all of the soft-linked path names to the source models it contains and
    yield them one at a time.

    :param smlt: source model logic tree file
    """
    n = nrml.read(smlt)
    try:
        blevels = n.logicTree
    except Exception:
        raise InvalidFile('%s is not a valid source_model_logic_tree_file' %
                          smlt)
    for blevel in blevels:
        with node.context(smlt, blevel):
            for bset in blevel:
                for br in bset:
                    smfname = br.uncertaintyModel.text.strip()
                    if smfname:
                        yield smfname
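A minimal usage sketch, with 'source_model_logic_tree.xml' as a hypothetical file name; since the generator yields names as it finds them, duplicates can be removed by the caller:

unique_paths = set(collect_source_model_paths('source_model_logic_tree.xml'))
for path in sorted(unique_paths):
    print(path)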
Example #50
    def convert_simpleFaultSource(self, node):
        """
        Convert the given node into a simple fault object.

        :param node: a node with tag simpleFaultSource
        :returns: a :class:`openquake.hazardlib.source.SimpleFaultSource`
                  instance
        """
        geom = node.simpleFaultGeometry
        msr = valid.SCALEREL[~node.magScaleRel]()
        fault_trace = self.geo_line(geom)
        mfd = self.convert_mfdist(node)
        with context(self.fname, node):
            try:
                hypo_list = valid.hypo_list(node.hypoList)
            except AttributeError:
                hypo_list = ()
            try:
                slip_list = valid.slip_list(node.slipList)
            except AttributeError:
                slip_list = ()
            simple = source.SimpleFaultSource(
                source_id=node['id'],
                name=node['name'],
                tectonic_region_type=node.attrib.get('tectonicRegion'),
                mfd=mfd,
                rupture_mesh_spacing=self.rupture_mesh_spacing,
                magnitude_scaling_relationship=msr,
                rupture_aspect_ratio=~node.ruptAspectRatio,
                upper_seismogenic_depth=~geom.upperSeismoDepth,
                lower_seismogenic_depth=~geom.lowerSeismoDepth,
                fault_trace=fault_trace,
                dip=~geom.dip,
                rake=~node.rake,
                temporal_occurrence_model=self.tom,
                hypo_list=hypo_list,
                slip_list=slip_list)
        return simple
Example #51
    def convert_sourceGroup(self, node):
        """
        Convert the given node into a SourceGroup object.

        :param node:
            a node with tag sourceGroup
        :returns:
            a :class:`SourceGroup` instance
        """
        trt = node['tectonicRegion']
        srcs_weights = node.attrib.get('srcs_weights')
        grp_probability = node.attrib.get('grp_probability')
        grp_attrs = {k: v for k, v in node.attrib.items()
                     if k not in ('name', 'src_interdep', 'rup_interdep',
                                  'srcs_weights')}
        sg = SourceGroup(trt)
        for src_node in node:
            with context(self.fname, src_node):
                src = self.convert_node(src_node)
                # transmit the group attributes to the underlying source
                for attr, value in grp_attrs.items():
                    if attr == 'tectonicRegion':
                        src.tectonic_region_type = value
                    elif attr == 'grp_probability':
                        pass  # do not transmit
                    else:  # transmit as it is
                        setattr(src, attr, node[attr])
                sg.update(src)
        if srcs_weights is not None:
            if len(srcs_weights) != len(node):
                raise ValueError('There are %d srcs_weights but %d source(s)'
                                 % (len(srcs_weights), len(node)))
        sg.name = node.attrib.get('name')
        sg.src_interdep = node.attrib.get('src_interdep')
        sg.rup_interdep = node.attrib.get('rup_interdep')
        sg._srcs_weights = srcs_weights
        sg.grp_probability = grp_probability
        return sg
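
The length check above enforces one weight per source in the group. The same rule on plain lists, as a minimal sketch (check_weights is illustrative, not a library function):

def check_weights(srcs_weights, sources):
    # one weight per source, mirroring the validation above
    if srcs_weights is not None and len(srcs_weights) != len(sources):
        raise ValueError('There are %d srcs_weights but %d source(s)'
                         % (len(srcs_weights), len(sources)))

check_weights([0.6, 0.4], ['srcA', 'srcB'])  # passes silently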
Example #52
    def convert_ruptureCollection(self, node):
        """
        :param node: a ruptureCollection node
        :returns: a dictionary grp_id -> EBRuptures
        """
        coll = {}
        for grpnode in node:
            grp_id = int(grpnode['id'])
            coll[grp_id] = ebrs = []
            for rupnode in grpnode:
                rup = self.convert_node(rupnode)
                rupid = int(rupnode['id'])
                sesnodes = rupnode.stochasticEventSets
                events = []
                for sesnode in sesnodes:
                    with context(self.fname, sesnode):
                        ses = sesnode['id']
                        for eid in (~sesnode).split():
                            events.append((eid, ses, 0))
                ebr = source.rupture.EBRupture(
                    rup, None, numpy.array(events, event_dt), grp_id, rupid)
                ebrs.append(ebr)
        return coll
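
The (eid, ses, sample) triples collected above are packed into a structured numpy array via event_dt, which is defined elsewhere in the library. A sketch with an assumed dtype, only to show the mechanics:

import numpy

# assumed layout; the real event_dt may differ
event_dt_sketch = numpy.dtype([('eid', numpy.uint64),
                               ('ses', numpy.uint32),
                               ('sample', numpy.uint32)])
events = numpy.array([(0, 1, 0), (1, 1, 0)], event_dt_sketch)
print(events['eid'])  # [0 1]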
Example #53
def get_fragility_model(node, fname):
    """
    :param node:
        a fragilityModel node
    :param fname:
        path to the fragility file
    :returns:
        a dictionary imt, ff_id -> fragility function list
    """
    with context(fname, node):
        fid = node['id']
        asset_category = node['assetCategory']
        loss_type = node['lossCategory']
        description = ~node.description
        limit_states = ~node.limitStates
        ffs = node[2:]
    fmodel = scientific.FragilityModel(
        fid, asset_category, loss_type, description, limit_states)
    for ff in ffs:
        array, attrs = ffconvert(fname, limit_states, ff)
        attrs['id'] = ff['id']
        ffl = scientific.FragilityFunctionList(array, **attrs)
        fmodel[ff.imls['imt'], ff['id']] = ffl
    return fmodel
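
A hedged usage sketch of get_fragility_model, following the same nrml.read pattern used in the other examples (the path and the ('PGA', 'RC') key are hypothetical):

fname = 'fragility_model.xml'
[fm_node] = nrml.read(fname)
fmodel = get_fragility_model(fm_node, fname)
ffl = fmodel['PGA', 'RC']  # lookup by (imt, fragility function id)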
Example #54
    def _add_asset(self, idx, asset_node, param):
        values = {}
        deductibles = {}
        insurance_limits = {}
        retrofitted = None
        asset_id = asset_node['id'].encode('utf8')
        prefix = param['asset_prefix'].encode('utf8')
        # FIXME: in case of an exposure split in CSV files the line number
        # is None because param['fname'] points to the .xml file :-(
        with context(param['fname'], asset_node):
            self.asset_refs.append(prefix + asset_id)
            taxonomy = asset_node['taxonomy']
            if 'damage' in param['calculation_mode']:
                # calculators of 'damage' kind require the 'number'
                # if it is missing a KeyError is raised
                number = asset_node['number']
            else:
                # some calculators ignore the 'number' attribute;
                # if it is missing it is considered 1, since we are going
                # to multiply by it
                try:
                    number = asset_node['number']
                except KeyError:
                    number = 1
                else:
                    if 'occupants' in self.cost_types['name']:
                        values['occupants_None'] = number
            location = asset_node.location['lon'], asset_node.location['lat']
            if param['region'] and not geometry.Point(*location).within(
                    param['region']):
                param['out_of_region'] += 1
                return
            tagnode = getattr(asset_node, 'tags', None)
            dic = {} if tagnode is None else tagnode.attrib.copy()
            dic['taxonomy'] = taxonomy
            idxs = self.tagcol.add_tags(dic, prefix)
        try:
            costs = asset_node.costs
        except AttributeError:
            costs = Node('costs', [])
        try:
            occupancies = asset_node.occupancies
        except AttributeError:
            occupancies = Node('occupancies', [])
        for cost in costs:
            with context(param['fname'], cost):
                cost_type = cost['type']
                if cost_type == 'structural':
                    # retrofitted is defined only for structural
                    retrofitted = float(cost.get('retrofitted', 0))
                if cost_type in param['relevant_cost_types']:
                    values[cost_type] = float(cost['value'])
                    try:
                        deductibles[cost_type] = cost['deductible']
                    except KeyError:
                        pass
                    try:
                        insurance_limits[cost_type] = cost['insuranceLimit']
                    except KeyError:
                        pass

        # check we are not missing a cost type
        missing = param['relevant_cost_types'] - set(values)
        if missing and missing <= param['ignore_missing_costs']:
            logging.warning(
                'Ignoring asset %s, missing cost type(s): %s',
                asset_id, ', '.join(missing))
            for cost_type in missing:
                values[cost_type] = None
        elif missing and 'damage' not in param['calculation_mode']:
            # missing costs are acceptable for damage calculators
            with context(param['fname'], asset_node):
                raise ValueError("Invalid Exposure. "
                                 "Missing cost %s for asset %s" % (
                                     missing, asset_id))
        tot_occupants = 0
        for occupancy in occupancies:
            with context(param['fname'], occupancy):
                occupants = 'occupants_%s' % occupancy['period']
                values[occupants] = float(occupancy['occupants'])
                tot_occupants += values[occupants]
        if occupancies:  # store average occupants
            values['occupants_None'] = tot_occupants / len(occupancies)
        area = float(asset_node.get('area', 1))
        ass = Asset(idx, idxs, number, location, values, area,
                    deductibles, insurance_limits, retrofitted,
                    self.cost_calculator)
        self.assets.append(ass)
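
The occupancy loop above stores one value per period and their average under the 'occupants_None' key. A worked sketch with illustrative numbers:

occupancies = {'day': 10.0, 'night': 20.0, 'transit': 15.0}
values = {'occupants_%s' % period: occ for period, occ in occupancies.items()}
values['occupants_None'] = sum(occupancies.values()) / len(occupancies)  # 15.0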
Example #55
def _get_exposure(fname, stop=None):
    """
    :param fname:
        path of the XML file containing the exposure
    :param stop:
        node at which to stop parsing (or None)
    :returns:
        a pair (Exposure instance, list of asset nodes)
    """
    [exposure] = nrml.read(fname, stop=stop)
    if not exposure.tag.endswith('exposureModel'):
        raise InvalidFile('%s: expected exposureModel, got %s' %
                          (fname, exposure.tag))
    description = exposure.description
    try:
        conversions = exposure.conversions
    except AttributeError:
        conversions = Node('conversions', nodes=[Node('costTypes', [])])
    try:
        inslimit = conversions.insuranceLimit
    except AttributeError:
        inslimit = Node('insuranceLimit', text=True)
    try:
        deductible = conversions.deductible
    except AttributeError:
        deductible = Node('deductible', text=True)
    try:
        area = conversions.area
    except AttributeError:
        # NB: the area type cannot be an empty string because when sending
        # around the CostCalculator object we would run into this numpy bug
        # about pickling dictionaries with empty strings:
        # https://github.com/numpy/numpy/pull/5475
        area = Node('area', dict(type='?'))
    try:
        occupancy_periods = exposure.occupancyPeriods.text or ''
    except AttributeError:
        occupancy_periods = ''
    try:
        tagNames = exposure.tagNames
    except AttributeError:
        tagNames = Node('tagNames', text='')
    tagnames = ~tagNames or []
    if set(tagnames) & {'taxonomy', 'exposure', 'country'}:
        raise InvalidFile('taxonomy, exposure and country are reserved '
                          'names; you cannot use them in <tagNames>: %s'
                          % fname)
    tagnames.insert(0, 'taxonomy')

    # read the cost types and perform some checks
    cost_types = []
    retrofitted = False
    for ct in conversions.costTypes:
        with context(fname, ct):
            ctname = ct['name']
            if ctname == 'structural' and 'retrofittedType' in ct.attrib:
                if ct['retrofittedType'] != ct['type']:
                    raise ValueError(
                        'The retrofittedType %s is different from the type '
                        '%s' % (ct['retrofittedType'], ct['type']))
                if ct['retrofittedUnit'] != ct['unit']:
                    raise ValueError(
                        'The retrofittedUnit %s is different from the unit '
                        '%s' % (ct['retrofittedUnit'], ct['unit']))
                retrofitted = True
            cost_types.append(
                (ctname, valid.cost_type_type(ct['type']), ct['unit']))
    if 'occupants' in cost_types:
        cost_types.append(('occupants', 'per_area', 'people'))
    cost_types.sort(key=operator.itemgetter(0))
    cost_types = numpy.array(cost_types, cost_type_dt)
    insurance_limit_is_absolute = il = inslimit.get('isAbsolute')
    deductible_is_absolute = de = deductible.get('isAbsolute')
    cc = CostCalculator(
        {}, {}, {},
        True if de is None else de,
        True if il is None else il,
        {name: i for i, name in enumerate(tagnames)},
    )
    for ct in cost_types:
        name = ct['name']  # structural, nonstructural, ...
        cc.cost_types[name] = ct['type']  # aggregated, per_asset, per_area
        cc.area_types[name] = area['type']
        cc.units[name] = ct['unit']
    assets = []
    asset_refs = []
    exp = Exposure(
        exposure['id'], exposure['category'],
        description.text, cost_types, occupancy_periods,
        insurance_limit_is_absolute, deductible_is_absolute, retrofitted,
        area.attrib, assets, asset_refs, cc, TagCollection(tagnames))
    assets_text = exposure.assets.text.strip()
    if assets_text:
        # the <assets> tag contains a list of file names
        dirname = os.path.dirname(fname)
        exp.datafiles = [os.path.join(dirname, f) for f in assets_text.split()]
    else:
        exp.datafiles = []
    return exp, exposure.assets
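
A minimal usage sketch of _get_exposure (exposure.xml is a hypothetical path pointing to a valid NRML exposure file):

exp, asset_nodes = _get_exposure('exposure.xml')
print(exp.cost_types['name'])  # the sorted cost type names
print(exp.datafiles)           # CSV files listed inside <assets>, if any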
Example #56
    def convert_sourceGroup(self, node):
        """
        Convert the given node into a SourceGroup object.

        :param node:
            a node with tag sourceGroup
        :returns:
            a :class:`SourceGroup` instance
        """
        trt = node['tectonicRegion']
        srcs_weights = node.attrib.get('srcs_weights')
        grp_attrs = {k: v for k, v in node.attrib.items()
                     if k not in ('name', 'src_interdep', 'rup_interdep',
                                  'srcs_weights')}
        sg = SourceGroup(trt, min_mag=self.minimum_magnitude)
        sg.temporal_occurrence_model = self.get_tom(node)
        sg.name = node.attrib.get('name')
        # Set attributes related to occurrence
        sg.src_interdep = node.attrib.get('src_interdep', 'indep')
        sg.rup_interdep = node.attrib.get('rup_interdep', 'indep')
        sg.grp_probability = node.attrib.get('grp_probability')
        # Set the cluster attribute
        sg.cluster = node.attrib.get('cluster') == 'true'
        # Filter admitted cases
        # 1. The source group is a cluster. In this case the cluster must have
        #    the attributes required to define its occurrence in time.
        if sg.cluster:
            msg = 'A cluster group requires the definition of a temporal'
            msg += ' occurrence model'
            assert 'tom' in node.attrib, msg
            if isinstance(sg.temporal_occurrence_model, PoissonTOM):
                assert hasattr(sg, 'occurrence_rate')
        #
        for src_node in node:
            if self.source_id and self.source_id != src_node['id']:
                continue  # filter by source_id
            src = self.convert_node(src_node)
            # transmit the group attributes to the underlying source
            for attr, value in grp_attrs.items():
                if attr == 'tectonicRegion':
                    src_trt = src_node.get('tectonicRegion')
                    if src_trt and src_trt != trt:
                        with context(self.fname, src_node):
                            raise ValueError('Found %s, expected %s' %
                                             (src_node['tectonicRegion'], trt))
                    src.tectonic_region_type = trt
                elif attr == 'grp_probability':
                    pass  # do not transmit
                else:  # transmit as it is
                    setattr(src, attr, node[attr])
            sg.update(src)
        if srcs_weights is not None:
            if len(node) and len(srcs_weights) != len(node):
                raise ValueError(
                    'There are %d srcs_weights but %d source(s) in %s'
                    % (len(srcs_weights), len(node), self.fname))
            for src, sw in zip(sg, srcs_weights):
                src.mutex_weight = sw
        # check that, when the cluster option is set, the group has a temporal
        # occurrence model properly defined
        if sg.cluster and not hasattr(sg, 'temporal_occurrence_model'):
            msg = 'The Source Group is a cluster but does not have a '
            msg += 'temporal occurrence model'
            raise ValueError(msg)
        return sg
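
The zip over the group and srcs_weights above attaches one mutex weight per source. A plain-object sketch of that step (the SimpleNamespace stand-ins are illustrative, not the library's source type):

from types import SimpleNamespace

sources = [SimpleNamespace(), SimpleNamespace()]  # stand-ins for converted sources
for src, weight in zip(sources, [0.7, 0.3]):
    src.mutex_weight = weight
assert [s.mutex_weight for s in sources] == [0.7, 0.3]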
Example #57
def ffconvert(fname, limit_states, ff, min_iml=1E-10):
    """
    Convert a fragility function into a numpy array plus a bunch
    of attributes.

    :param fname: path to the fragility model file
    :param limit_states: expected limit states
    :param ff: fragility function node
    :param min_iml: minimum IML used in place of zero values (default 1E-10)
    :returns: a pair (array, dictionary)
    """
    with context(fname, ff):
        ffs = ff[1:]
        imls = ff.imls
    nodamage = imls.attrib.get('noDamageLimit')
    if nodamage == 0:
        # use a cutoff to avoid log(0) in GMPE.to_distribution_values
        logging.warning('Found a noDamageLimit=0 in %s, line %s, '
                        'using %g instead', fname, ff.lineno, min_iml)
        nodamage = min_iml
    with context(fname, imls):
        attrs = dict(format=ff['format'],
                     imt=imls['imt'],
                     id=ff['id'],
                     nodamage=nodamage)

    LS = len(limit_states)
    if LS != len(ffs):
        with context(fname, ff):
            raise InvalidFile('expected %d limit states, found %d' %
                              (LS, len(ffs)))
    if ff['format'] == 'continuous':
        minIML = float(imls['minIML'])
        if minIML == 0:
            # use a cutoff to avoid log(0) in GMPE.to_distribution_values
            logging.warning('Found minIML=0 in %s, line %s, using %g instead',
                            fname, imls.lineno, min_iml)
            minIML = min_iml
        attrs['minIML'] = minIML
        attrs['maxIML'] = float(imls['maxIML'])
        array = numpy.zeros(LS, [('mean', F64), ('stddev', F64)])
        for i, ls, node in zip(range(LS), limit_states, ff[1:]):
            if ls != node['ls']:
                with context(fname, node):
                    raise InvalidFile('expected %s, found %s' %
                                      (ls, node['ls']))
            array['mean'][i] = node['mean']
            array['stddev'][i] = node['stddev']
    elif ff['format'] == 'discrete':
        attrs['imls'] = ~imls
        valid.check_levels(attrs['imls'], attrs['imt'], min_iml)
        num_poes = len(attrs['imls'])
        array = numpy.zeros((LS, num_poes))
        for i, ls, node in zip(range(LS), limit_states, ff[1:]):
            with context(fname, node):
                if ls != node['ls']:
                    raise InvalidFile('expected %s, found %s' %
                                      (ls, node['ls']))
                poes = (~node if isinstance(~node, list)
                        else valid.probabilities(~node))
                if len(poes) != num_poes:
                    raise InvalidFile('expected %s, found %s' %
                                      (num_poes, len(poes)))
                array[i, :] = poes
    # NB: the format is constrained in nrml.FragilityNode to be either
    # discrete or continuous, there is no third option
    return array, attrs
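
In the 'continuous' branch above the per-limit-state parameters are stored in a structured array. A worked sketch for two limit states, with F64 = numpy.float64 and illustrative values:

import numpy

F64 = numpy.float64
array = numpy.zeros(2, [('mean', F64), ('stddev', F64)])
array['mean'] = [0.2, 0.5]      # one mean per limit state
array['stddev'] = [0.05, 0.10]  # one stddev per limit state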
Example #58
def get_vulnerability_functions_05(node, fname):
    """
    :param node:
        a vulnerabilityModel node
    :param fname:
        path of the vulnerability file
    :returns:
        a dictionary imt, vf_id -> vulnerability function
    """
    # NB: the IMTs can be duplicated, each time with different levels;
    # every vulnerability function in a set gets its own levels
    vf_ids = set()
    vmodel = scientific.VulnerabilityModel(**node.attrib)
    # imt, vf_id -> vulnerability function
    for vfun in node.getnodes('vulnerabilityFunction'):
        with context(fname, vfun):
            imt = vfun.imls['imt']
            imls = numpy.array(~vfun.imls)
            vf_id = vfun['id']
        if vf_id in vf_ids:
            raise InvalidFile(
                'Duplicated vulnerabilityFunctionID: %s: %s, line %d' %
                (vf_id, fname, vfun.lineno))
        vf_ids.add(vf_id)
        num_probs = None
        if vfun['dist'] == 'PM':
            loss_ratios, probs = [], []
            for probabilities in vfun[1:]:
                loss_ratios.append(probabilities['lr'])
                probs.append(valid.probabilities(~probabilities))
                if num_probs is None:
                    num_probs = len(probs[-1])
                elif len(probs[-1]) != num_probs:
                    raise ValueError(
                        'Wrong number of probabilities (expected %d, '
                        'got %d) in %s, line %d' %
                        (num_probs, len(probs[-1]), fname,
                         probabilities.lineno))
            all_probs = numpy.array(probs)
            assert all_probs.shape == (len(loss_ratios), len(imls)), (
                len(loss_ratios), len(imls))
            vmodel[imt, vf_id] = (
                scientific.VulnerabilityFunctionWithPMF(
                    vf_id, imt, imls, numpy.array(loss_ratios),
                    all_probs))
            # the seed will be set by readinput.get_risk_model
        else:
            with context(fname, vfun):
                loss_ratios = ~vfun.meanLRs
                coefficients = ~vfun.covLRs
            if len(loss_ratios) != len(imls):
                raise InvalidFile(
                    'There are %d loss ratios, but %d imls: %s, line %d' %
                    (len(loss_ratios), len(imls), fname,
                     vfun.meanLRs.lineno))
            if len(coefficients) != len(imls):
                raise InvalidFile(
                    'There are %d coefficients, but %d imls: %s, '
                    'line %d' % (len(coefficients), len(imls), fname,
                                 vfun.covLRs.lineno))
            with context(fname, vfun):
                vmodel[imt, vf_id] = scientific.VulnerabilityFunction(
                    vf_id, imt, imls, loss_ratios, coefficients,
                    vfun['dist'])
    return vmodel
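
The assert in the 'PM' branch above pins the probability matrix shape to (number of loss ratios, number of IMLs). A small numeric sketch, all values illustrative:

import numpy

imls = [0.1, 0.2, 0.3]
loss_ratios = ['0.00', '0.50', '1.00']
probs = numpy.array([[0.9, 0.6, 0.3],
                     [0.1, 0.3, 0.4],
                     [0.0, 0.1, 0.3]])
assert probs.shape == (len(loss_ratios), len(imls))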