Example #1
    def serialize(self, data):
        """
        Write a sequence of uniform hazard spectra to the specified file.

        :param data:
            Iterable of UHS data. Each datum must be an object with the
            following attributes:

            * imls: A sequence of Intensity Measure Levels
            * location: An object representing the location of the curve; must
              have `x` and `y` to represent lon and lat, respectively.
        """
        gml_ns = nrml.SERIALIZE_NS_MAP['gml']

        with open(self.dest, 'wb') as fh:
            root = et.Element('nrml')

            uh_spectra = et.SubElement(root, 'uniformHazardSpectra')

            _set_metadata(uh_spectra, self.metadata, _ATTR_MAP)

            periods_elem = et.SubElement(uh_spectra, 'periods')
            periods_elem.text = ' '.join([str(x)
                                          for x in self.metadata['periods']])

            for uhs in data:
                uhs_elem = et.SubElement(uh_spectra, 'uhs')
                gml_point = et.SubElement(uhs_elem, '{%s}Point' % gml_ns)
                gml_pos = et.SubElement(gml_point, '{%s}pos' % gml_ns)
                gml_pos.text = '%s %s' % (uhs.location.x, uhs.location.y)
                imls_elem = et.SubElement(uhs_elem, 'IMLs')
                imls_elem.text = ' '.join([str(x) for x in uhs.imls])

            nrml.write(list(root), fh)
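The `data` argument described above only needs objects exposing `imls` and a `location` with `x`/`y`; below is a minimal sketch of how such input could be built (the writer instance, its `dest` and its `metadata['periods']` are assumed to be set up elsewhere).

from collections import namedtuple

Location = namedtuple('Location', 'x y')
UHS = namedtuple('UHS', 'imls location')

# two spectra at different sites; the periods come from the writer metadata
data = [
    UHS(imls=[0.30, 0.25, 0.12], location=Location(x=10.0, y=45.0)),
    UHS(imls=[0.28, 0.21, 0.09], location=Location(x=10.5, y=45.2)),
]
# writer.serialize(data)  # `writer` is an assumed instance of the class above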
Example #2
    def serialize(self, total_fractions, locations_fractions):
        """
        Actually serialize the fractions.

        :param dict total_fractions:
            maps each value of `variable` to a tuple holding the absolute
            losses and the fraction
        :param dict locations_fractions:
            a dictionary mapping a tuple (longitude, latitude) to
            bins. Each bin is a dictionary with the same structure as
            `total_fractions`.
        """

        def write_bins(parent, bin_data):
            for value, (absolute_loss, fraction) in bin_data.items():
                bin_element = et.SubElement(parent, "bin")
                bin_element.set("value", str(value))
                bin_element.set("absoluteLoss", FIVEDIGITS % absolute_loss)
                bin_element.set("fraction", FIVEDIGITS % fraction)

        with open(self.dest, 'wb') as output:
            root = et.Element("nrml")

            # container element
            container = et.SubElement(root, "lossFraction")
            container.set("investigationTime",
                          "%.2f" % self.hazard_metadata.investigation_time)

            if self.poe is not None:
                container.set("poE", "%.4f" % self.poe)

            container.set(
                "sourceModelTreePath", self.hazard_metadata.sm_path or "")
            container.set("gsimTreePath", self.hazard_metadata.gsim_path or "")

            if self.hazard_metadata.statistics is not None:
                container.set("statistics", self.hazard_metadata.statistics)

            if self.hazard_metadata.quantile is not None:
                container.set(
                    "quantileValue", "%.4f" % self.hazard_metadata.quantile)
            container.set("lossCategory", self.loss_category)
            container.set("unit", self.loss_unit)
            container.set("variable", self.variable)
            container.set("lossType", self.loss_type)

            # total fractions
            total = et.SubElement(container, "total")
            write_bins(total, total_fractions)

            # map
            map_element = et.SubElement(container, "map")

            for lon_lat, bin_data in locations_fractions.items():
                node_element = et.SubElement(map_element, "node")
                node_element.set("lon", str(lon_lat[0]))
                node_element.set("lat", str(lon_lat[1]))
                write_bins(node_element, bin_data)

            nrml.write(list(root), output)
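The two dictionaries have a simple shape; here is a sketch of plausible input, with invented taxonomy keys and numbers (`writer` stands for an assumed instance of the class above).

# each value of `variable` (e.g. a taxonomy) maps to (absolute_loss, fraction)
total_fractions = {
    'RC': (1500.0, 0.75),
    'W': (500.0, 0.25),
}
# each (lon, lat) tuple maps to a bin dictionary with the same structure
locations_fractions = {
    (10.0, 45.0): {'RC': (900.0, 0.9), 'W': (100.0, 0.1)},
    (10.5, 45.2): {'RC': (600.0, 0.6), 'W': (400.0, 0.4)},
}
# writer.serialize(total_fractions, locations_fractions)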
Example #3
def csv_to_xml(input_csv, output_xml):
    """
    Parses the site model from an input CSV file (with header) to an output XML file
    """
    data = np.genfromtxt(input_csv, delimiter=",", names=True)
    site_nodes = []
    for i in range(0, len(data)):
        if bool(data["vs30Type"][i]):
            vs30_type = "measured"
        else:
            vs30_type = "inferred"
        site_attrib = [
            ("lon", str(data["longitude"][i])),
            ("lat", str(data["latitude"][i])),
            ("vs30", str(data["vs30"][i])),
            ("vs30Type", vs30_type),
            ("z1pt0", str(data["z1pt0"][i])),
            ("z2pt5", str(data["z2pt5"][i])),
        ]
        if "backarc" in data:
            site_attrib.append(("backarc", str(bool(data["backarc"][i]))))
        else:
            site_attrib.append(("backarc", "False"))
        site_nodes.append(Node("site", OrderedDict(site_attrib), nodes=None))
    site_model = Node("siteModel", nodes=site_nodes)
    with open(output_xml, "w") as fid:
        nrml.write([site_model], fid, "%s")
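A usage sketch: the input CSV must carry a header with the column names read above, with `vs30Type` given as 0/1 because it is parsed numerically; the file names are illustrative.

csv_text = """longitude,latitude,vs30,vs30Type,z1pt0,z2pt5
10.0,45.0,760.0,1,40.0,1.0
10.5,45.2,400.0,0,80.0,2.5
"""
with open("site_model.csv", "w") as f:  # illustrative file name
    f.write(csv_text)
csv_to_xml("site_model.csv", "site_model.xml")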
Example #4
def upgrade_file(path):
    """Upgrade to the latest NRML version"""
    node0 = nrml.read(path, chatty=False)[0]
    shutil.copy(path, path + '.bak')  # make a backup of the original file
    tag = striptag(node0.tag)
    gml = True
    if tag == 'vulnerabilityModel':
        vf_dict, cat_dict = get_vulnerability_functions_04(path)
        # below I am converting into a NRML 0.5 vulnerabilityModel
        node0 = Node(
            'vulnerabilityModel', cat_dict,
            nodes=list(map(riskmodels.obj_to_node, vf_dict.values())))
        gml = False
    elif tag == 'fragilityModel':
        node0 = riskmodels.convert_fragility_model_04(
            nrml.read(path)[0], path)
        gml = False
    elif tag == 'sourceModel':
        node0 = nrml.read(path)[0]
        dic = groupby(node0.nodes, operator.itemgetter('tectonicRegion'))
        node0.nodes = [Node('sourceGroup',
                            dict(tectonicRegion=trt, name="group %s" % i),
                            nodes=srcs)
                       for i, (trt, srcs) in enumerate(dic.items(), 1)]
    with open(path, 'w') as f:
        nrml.write([node0], f, gml=gml)
Example #5
    def serialize(self, data):
        """
        Write a sequence of uniform hazard spectra to the specified file.

        :param data:
            Iterable of UHS data. Each datum must be an object with the
            following attributes:

            * imls: A sequence of Intensity Measure Levels
            * location: An object representing the location of the curve; must
              have `x` and `y` to represent lon and lat, respectively.
        """
        gml_ns = nrml.SERIALIZE_NS_MAP['gml']

        with nrml.NRMLFile(self.dest, 'w') as fh:
            root = et.Element('nrml')

            uh_spectra = et.SubElement(root, 'uniformHazardSpectra')

            _set_metadata(uh_spectra, self.metadata, _ATTR_MAP)

            periods_elem = et.SubElement(uh_spectra, 'periods')
            periods_elem.text = ' '.join(
                [str(x) for x in self.metadata['periods']])

            for uhs in data:
                uhs_elem = et.SubElement(uh_spectra, 'uhs')
                gml_point = et.SubElement(uhs_elem, '{%s}Point' % gml_ns)
                gml_pos = et.SubElement(gml_point, '{%s}pos' % gml_ns)
                gml_pos.text = '%s %s' % (uhs.location.x, uhs.location.y)
                imls_elem = et.SubElement(uhs_elem, 'IMLs')
                imls_elem.text = ' '.join([str(x) for x in uhs.imls])

            nrml.write(list(root), fh)
Example #6
def event_set_to_rupture_xmls(input_ses, output_dir):
    """
    Parses the entire event set to a set of files
    """
    if os.path.exists(output_dir):
        raise IOError("Output directory %s already exists" % output_dir)
    else:
        os.mkdir(output_dir)
    nodeset = nrml.read(input_ses, chatty=False)
    for sesc in nodeset:
        sesc_dir = os.path.join(
            output_dir, 
            "smltp_{:s}".format(sesc["sourceModelTreePath"]))
        os.mkdir(sesc_dir)
        for i, ses in enumerate(sesc):
            ses_dir = os.path.join(sesc_dir, "ses_{:s}".format(str(ses["id"])))
            os.mkdir(ses_dir)
            for rupture in ses:
                print "Parsing event %s" % rupture["id"]
                if hasattr(rupture, "planarSurface"):
                    rupture_node = parse_planar_surface(rupture)
                elif hasattr(rupture, "mesh"):
                    rupture_node = parse_mesh_surface(rupture)
                rup_id = rupture["id"].replace("=", "_")
                filename = os.path.join(ses_dir,
                                        rup_id.replace("|", "_") + ".xml")
                with open(filename, "w") as f:
                    nrml.write([rupture_node], f, "%s")
Example #7
    def serialize(self, data):
        """
        Serialize loss map data to XML.

        See :meth:`LossMapWriter.serialize` for expected input.
        """
        _assert_valid_input(data)

        with open(self._dest, 'wb') as output:
            root = et.Element("nrml")

            loss_map_el = self._create_loss_map_elem(root)

            current_location = None
            current_node = None
            for loss in data:

                if (current_location is None or
                        loss.location.wkt != current_location):
                    current_node = et.SubElement(loss_map_el, "node")
                    current_location = _append_location(
                        current_node, loss.location)

                loss_elem = et.SubElement(current_node, "loss")
                loss_elem.set("assetRef", str(loss.asset_ref))

                if loss.std_dev is not None:
                    loss_elem.set("mean", FIVEDIGITS % loss.value)
                    loss_elem.set("stdDev", FIVEDIGITS % loss.std_dev)
                else:
                    loss_elem.set("value", FIVEDIGITS % loss.value)

            nrml.write(list(root), output)
Example #8
    def serialize(self, data):
        """
        Serialize loss map data to XML.

        See :meth:`LossMapWriter.serialize` for expected input.
        """
        _assert_valid_input(data)

        with NRMLFile(self._dest, 'w') as output:
            root = et.Element("nrml")

            loss_map_el = self._create_loss_map_elem(root)

            current_location = None
            current_node = None
            for loss in data:

                if (current_location is None
                        or loss.location.wkt != current_location):
                    current_node = et.SubElement(loss_map_el, "node")
                    current_location = _append_location(
                        current_node, loss.location)

                loss_elem = et.SubElement(current_node, "loss")
                loss_elem.set("assetRef", str(loss.asset_ref))

                if loss.std_dev is not None:
                    loss_elem.set("mean", FIVEDIGITS % loss.value)
                    loss_elem.set("stdDev", FIVEDIGITS % loss.std_dev)
                else:
                    loss_elem.set("value", FIVEDIGITS % loss.value)

            nrml.write(list(root), output)
Example #9
    def serialize(self, total_fractions, locations_fractions):
        """
        Actually serialize the fractions.

        :param dict total_fractions:
            maps each value of `variable` to a tuple holding the absolute
            losses and the fraction
        :param dict locations_fractions:
            a dictionary mapping a tuple (longitude, latitude) to
            bins. Each bin is a dictionary with the same structure as
            `total_fractions`.
        """

        def write_bins(parent, bin_data):
            for value, (absolute_loss, fraction) in bin_data.items():
                bin_element = et.SubElement(parent, "bin")
                bin_element.set("value", str(value))
                bin_element.set("absoluteLoss", "%.4e" % absolute_loss)
                bin_element.set("fraction", "%.5f" % fraction)

        with NRMLFile(self.dest, 'w') as output:
            root = et.Element("nrml")

            # container element
            container = et.SubElement(root, "lossFraction")
            container.set("investigationTime",
                          "%.2f" % self.hazard_metadata.investigation_time)

            if self.poe is not None:
                container.set("poE", "%.4f" % self.poe)

            container.set(
                "sourceModelTreePath", self.hazard_metadata.sm_path or "")
            container.set("gsimTreePath", self.hazard_metadata.gsim_path or "")

            if self.hazard_metadata.statistics is not None:
                container.set("statistics", self.hazard_metadata.statistics)

            if self.hazard_metadata.quantile is not None:
                container.set(
                    "quantileValue", "%.4f" % self.hazard_metadata.quantile)
            container.set("lossCategory", self.loss_category)
            container.set("unit", self.loss_unit)
            container.set("variable", self.variable)
            container.set("lossType", self.loss_type)

            # total fractions
            total = et.SubElement(container, "total")
            write_bins(total, total_fractions)

            # map
            map_element = et.SubElement(container, "map")

            for lon_lat, bin_data in locations_fractions.items():
                node_element = et.SubElement(map_element, "node")
                node_element.set("lon", str(lon_lat[0]))
                node_element.set("lat", str(lon_lat[1]))
                write_bins(node_element, bin_data)

            nrml.write(list(root), output)
Example #10
    def serialize(self, data):
        """
        :param data:

            A sequence of data where each datum has the following attributes:

            * matrix: N-dimensional numpy array containing the disaggregation
              histogram.
            * dim_labels: A list of strings which label the dimensions of a
              given histogram. For example, for a Magnitude-Distance-Epsilon
              histogram, we would expect `dim_labels` to be
              ``['Mag', 'Dist', 'Eps']``.
            * poe: The disaggregation Probability of Exceedance level for which
              these results were produced.
            * iml: Intensity measure level, interpolated from the source hazard
              curve at the given ``poe``.
        """

        with nrml.NRMLFile(self.dest, 'w') as fh:
            root = et.Element('nrml')

            diss_matrices = et.SubElement(root, 'disaggMatrices')

            _set_metadata(diss_matrices, self.metadata, _ATTR_MAP)

            transform = lambda val: ', '.join(map(scientificformat, val))
            _set_metadata(diss_matrices,
                          self.metadata,
                          self.BIN_EDGE_ATTR_MAP,
                          transform=transform)

            for result in data:
                diss_matrix = et.SubElement(diss_matrices, 'disaggMatrix')

                # Check that we have bin edges defined for each dimension label
                # (mag, dist, lon, lat, eps, TRT)
                for label in result.dim_labels:
                    bin_edge_attr = self.DIM_LABEL_TO_BIN_EDGE_MAP.get(label)
                    assert self.metadata.get(bin_edge_attr) is not None, (
                        "Writer is missing '%s' metadata" % bin_edge_attr)

                result_type = ','.join(result.dim_labels)
                diss_matrix.set('type', result_type)

                dims = ','.join(str(x) for x in result.matrix.shape)
                diss_matrix.set('dims', dims)

                diss_matrix.set('poE', scientificformat(result.poe))
                diss_matrix.set('iml', scientificformat(result.iml))

                for idxs, value in numpy.ndenumerate(result.matrix):
                    prob = et.SubElement(diss_matrix, 'prob')

                    index = ','.join([str(x) for x in idxs])
                    prob.set('index', index)
                    prob.set('value', scientificformat(value))

            nrml.write(list(root), fh)
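Each datum bundles the histogram with its labels, poe and iml; a minimal sketch with invented numbers (the bin-edge metadata and the writer instance are assumed to be configured elsewhere).

import numpy
from collections import namedtuple

DissMatrix = namedtuple('DissMatrix', 'matrix dim_labels poe iml')

# a 2x3 Magnitude-Distance histogram
datum = DissMatrix(
    matrix=numpy.array([[0.01, 0.02, 0.03],
                        [0.04, 0.05, 0.06]]),
    dim_labels=['Mag', 'Dist'],
    poe=0.1,
    iml=0.25)
# writer.serialize([datum])  # assumed writer with matching bin-edge metadata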
Example #11
    def serialize(self, data):
        """
        :param data:

            A sequence of data where each datum has the following attributes:

            * matrix: N-dimensional numpy array containing the disaggregation
              histogram.
            * dim_labels: A list of strings which label the dimensions of a
              given histogram. For example, for a Magnitude-Distance-Epsilon
              histogram, we would expect `dim_labels` to be
              ``['Mag', 'Dist', 'Eps']``.
            * poe: The disaggregation Probability of Exceedance level for which
              these results were produced.
            * iml: Intensity measure level, interpolated from the source hazard
              curve at the given ``poe``.
        """

        with open(self.dest, 'wb') as fh, floatformat('%.6E'):
            root = et.Element('nrml')

            diss_matrices = et.SubElement(root, 'disaggMatrices')

            _set_metadata(diss_matrices, self.metadata, _ATTR_MAP)

            transform = lambda val: ', '.join(map(scientificformat, val))
            _set_metadata(diss_matrices, self.metadata, self.BIN_EDGE_ATTR_MAP,
                          transform=transform)

            for result in data:
                diss_matrix = et.SubElement(diss_matrices, 'disaggMatrix')

                # Check that we have bin edges defined for each dimension label
                # (mag, dist, lon, lat, eps, TRT)
                for label in result.dim_labels:
                    bin_edge_attr = self.DIM_LABEL_TO_BIN_EDGE_MAP.get(label)
                    assert self.metadata.get(bin_edge_attr) is not None, (
                        "Writer is missing '%s' metadata" % bin_edge_attr
                    )

                result_type = ','.join(result.dim_labels)
                diss_matrix.set('type', result_type)

                dims = ','.join(str(x) for x in result.matrix.shape)
                diss_matrix.set('dims', dims)

                diss_matrix.set('poE', scientificformat(result.poe))
                diss_matrix.set('iml', scientificformat(result.iml))

                for idxs, value in numpy.ndenumerate(result.matrix):
                    prob = et.SubElement(diss_matrix, 'prob')

                    index = ','.join([str(x) for x in idxs])
                    prob.set('index', index)
                    prob.set('value', scientificformat(value))

            nrml.write(list(root), fh)
Example #12
def upgrade_file(path):
    """Upgrade to the latest NRML version"""
    node0 = nrml.read(path, chatty=False)[0]
    shutil.copy(path, path + '.bak')  # make a backup of the original file
    if striptag(node0.tag) == 'vulnerabilityModel':
        vf_dict, cat_dict = get_vulnerability_functions_04(path)
        node0 = LiteralNode(
            'vulnerabilityModel', cat_dict,
            nodes=list(map(riskmodels.obj_to_node, list(vf_dict.values()))))
    with open(path, 'w') as f:
        nrml.write([node0], f)
Example #13
    def serialize(self, data):
        """
        Serialize a collection of (benefit cost) ratios.

        :param data:
            An iterable of bcr objects. Each object should:

            * define an attribute `location`, which is itself an object
              defining two attributes, `x` containing the longitude value
              and `y` containing the latitude value. Also, it must define
              an attribute `wkt`, which is the Well-known text
              representation of the location.
            * define an attribute `asset_ref`, which contains the unique
              identifier of the asset related to the (benefit cost) ratio.
            * define an attribute `average_annual_loss_original`, which is
              the expected average annual economic loss using the original
              vulnerability of the asset.
            * define an attribute `average_annual_loss_retrofitted`,
              which is the expected average annual economic loss using the
              improved (better design or retrofitted) vulnerability
              of the asset.
            * define an attribute `bcr`, which is the value of the (
              benefit cost) ratio.
        """

        _assert_valid_input(data)

        with open(self._path, "wb") as output:
            root = et.Element("nrml")

            for bcr in data:
                if self._bcr_map is None:
                    self._create_bcr_map_elem(root)

                bcr_node = self._bcr_nodes.get(bcr.location.wkt)

                if bcr_node is None:
                    bcr_node = et.SubElement(self._bcr_map, "node")
                    _append_location(bcr_node, bcr.location)
                    self._bcr_nodes[bcr.location.wkt] = bcr_node

                bcr_elem = et.SubElement(bcr_node, "bcr")
                bcr_elem.set("assetRef", str(bcr.asset_ref))
                bcr_elem.set("ratio", str(bcr.bcr))

                bcr_elem.set("aalOrig", str(
                    bcr.average_annual_loss_original))

                bcr_elem.set("aalRetr", str(
                    bcr.average_annual_loss_retrofitted))

            nrml.write(list(root), output)
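The bcr objects only need the attributes listed in the docstring; a sketch with invented values (`writer` is an assumed instance of the class above).

from collections import namedtuple

Location = namedtuple('Location', 'x y wkt')
BCRItem = namedtuple(
    'BCRItem',
    'location asset_ref average_annual_loss_original '
    'average_annual_loss_retrofitted bcr')

loc = Location(x=10.0, y=45.0, wkt='POINT(10.0 45.0)')
data = [BCRItem(loc, 'asset_1', 120.0, 80.0, 1.5)]
# writer.serialize(data)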
Example #14
    def serialize(self, data):
        """
        Serialize a collection of (benefit cost) ratios.

        :param data:
            An iterable of bcr objects. Each object should:

            * define an attribute `location`, which is itself an object
              defining two attributes, `x` containing the longitude value
              and `y` containing the latitude value. Also, it must define
              an attribute `wkt`, which is the Well-known text
              representation of the location.
            * define an attribute `asset_ref`, which contains the unique
              identifier of the asset related to the (benefit cost) ratio.
            * define an attribute `average_annual_loss_original`, which is
              the expected average annual economic loss using the original
              vulnerability of the asset.
            * define an attribute `average_annual_loss_retrofitted`,
              which is the expected average annual economic loss using the
              improved (better design or retrofitted) vulnerability
              of the asset.
            * define an attribute `bcr`, which is the value of the (
              benefit cost) ratio.
        """

        _assert_valid_input(data)

        with open(self._path, "wb") as output:
            root = et.Element("nrml")

            for bcr in data:
                if self._bcr_map is None:
                    self._create_bcr_map_elem(root)

                bcr_node = self._bcr_nodes.get(bcr.location.wkt)

                if bcr_node is None:
                    bcr_node = et.SubElement(self._bcr_map, "node")
                    _append_location(bcr_node, bcr.location)
                    self._bcr_nodes[bcr.location.wkt] = bcr_node

                bcr_elem = et.SubElement(bcr_node, "bcr")
                bcr_elem.set("assetRef", str(bcr.asset_ref))
                bcr_elem.set("ratio", str(bcr.bcr))

                bcr_elem.set("aalOrig", str(
                    bcr.average_annual_loss_original))

                bcr_elem.set("aalRetr", str(
                    bcr.average_annual_loss_retrofitted))

            nrml.write(list(root), output)
Example #15
 def to_nrml(self, key, data, fname=None, fmt='%11.7E'):
     """
     :param key:
      `dmg_dist_per_asset|dmg_dist_per_taxonomy|dmg_dist_total|collapse_map`
     :param data: sequence of rows to serialize
     :param fname: the path name of the output file; if None, build a name
     :returns: path name of the saved file
     """
     fname = fname or writetmp()
     node = getattr(self, key + '_node')(data)
     with open(fname, 'w') as out:
         nrml.write([node], out, fmt)
     return fname
Example #16
 def to_nrml(self, key, data, fname=None, fmt=FIVEDIGITS):
     """
     :param key:
      `dmg_dist_per_asset|dmg_dist_per_taxonomy|dmg_dist_total|collapse_map`
     :param data: sequence of rows to serialize
     :param fname: the path name of the output file; if None, build a name
     :returns: path name of the saved file
     """
     fname = fname or writetmp()
     node = getattr(self, key + '_node')(data)
     with open(fname, 'wb') as out:
         nrml.write([node], out, fmt)
     return fname
Example #17
 def write(self, destination, source_model, name=None):
     """
     Exports to NRML
     """
     if os.path.exists(destination):
         os.remove(destination)
     self.destination = destination
     #assert isinstance(source_model, SourceModel) and len(source_model)
     if name:
         source_model.name = name
     output_source_model = LiteralNode("sourceModel", {"name": name},
                                       nodes=source_model.sources)
     print "Exporting Source Model to %s" % self.destination
     with open(self.destination, "w") as f:
         nrml.write([output_source_model], f, "%s")
Example #18
def upgrade_file(path):
    """Upgrade to the latest NRML version"""
    node0 = nrml.read(path, chatty=False)[0]
    shutil.copy(path, path + '.bak')  # make a backup of the original file
    tag = striptag(node0.tag)
    if tag == 'vulnerabilityModel':
        vf_dict, cat_dict = get_vulnerability_functions_04(path)
        # below I am converting into a NRML 0.5 vulnerabilityModel
        node0 = LiteralNode(
            'vulnerabilityModel', cat_dict,
            nodes=list(map(riskmodels.obj_to_node, list(vf_dict.values()))))
    elif tag == 'fragilityModel':
        node0 = riskmodels.convert_fragility_model_04(
            nrml.read(path)[0], path)
    with open(path, 'w') as f:
        nrml.write([node0], f)
Example #19
    def serialize(self, data):
        """
        Write a sequence of hazard curves to the specified file.

        :param data:
            Iterable of hazard curve data. Each datum must be an object with
            the following attributes:

            * poes: A list of probability of exceedance values (floats).
            * location: An object representing the location of the curve; must
              have `x` and `y` to represent lon and lat, respectively.
        """
        with open(self.dest, 'wb') as fh:
            root = et.Element('nrml')
            self.add_hazard_curves(root, self.metadata, data)
            nrml.write(list(root), fh)
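Each hazard curve datum only needs `poes` and a `location`; a minimal sketch with invented values (the writer instance and its metadata are assumed).

from collections import namedtuple

Location = namedtuple('Location', 'x y')
HazardCurve = namedtuple('HazardCurve', 'poes location')

data = [
    HazardCurve(poes=[0.9, 0.5, 0.1], location=Location(10.0, 45.0)),
    HazardCurve(poes=[0.8, 0.4, 0.05], location=Location(10.5, 45.2)),
]
# writer.serialize(data)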
Example #20
    def serialize(self, curve_set):
        """
        Write a set of sequences of hazard curves to the specified file.
        :param curve_set:

           Iterable over sequence of curves. Each element returned by
           the iterable is an iterable suitable to be used by the
           :meth:`serialize` of the class
           :class:`openquake.commonlib.hazard_writers.HazardCurveXMLWriter`
        """
        with nrml.NRMLFile(self.dest, 'w') as fh:
            root = et.Element('nrml')
            for metadata, curve_data in zip(self.metadata_set, curve_set):
                writer = HazardCurveXMLWriter(self.dest, **metadata)
                writer.add_hazard_curves(root, metadata, curve_data)
            nrml.write(list(root), fh)
Example #21
    def serialize(self, curve_set):
        """
        Write a set of sequences of hazard curves to the specified file.
        :param curve_set:

           Iterable over sequence of curves. Each element returned by
           the iterable is an iterable suitable to be used by the
           :meth:`serialize` of the class
           :class:`openquake.commonlib.hazard_writers.HazardCurveXMLWriter`
        """
        with open(self.dest, 'wb') as fh:
            root = et.Element('nrml')
            for metadata, curve_data in zip(self.metadata_set, curve_set):
                writer = HazardCurveXMLWriter(self.dest, **metadata)
                writer.add_hazard_curves(root, metadata, curve_data)
            nrml.write(list(root), fh)
Example #22
    def serialize(self, data):
        """
        Write a sequence of hazard curves to the specified file.

        :param data:
            Iterable of hazard curve data. Each datum must be an object with
            the following attributes:

            * poes: A list of probability of exceedance values (floats).
            * location: An object representing the location of the curve; must
              have `x` and `y` to represent lon and lat, respectively.
        """
        with nrml.NRMLFile(self.dest, 'w') as fh:
            root = et.Element('nrml')
            self.add_hazard_curves(root, self.metadata, data)
            nrml.write(list(root), fh)
Example #23
def write_source_model(dest, groups, name=None):
    """
    Writes a source model to XML.

    :param str dest:
        Destination path
    :param list groups:
        Source model as list of SourceGroups
    :param str name:
        Name of the source model (if missing, extracted from the filename)
    """
    name = name or os.path.splitext(os.path.basename(dest))[0]
    nodes = list(map(obj_to_node, sorted(groups)))
    source_model = Node("sourceModel", {"name": name}, nodes=nodes)
    with open(dest, 'wb') as f:
        nrml.write([source_model], f, '%s')
    return dest
Example #24
def tidy(fnames):
    """
    Reformat a NRML file in a canonical form. That also means reducing the
    precision of the floats to a standard value. If the file is invalid,
    a clear error message is shown.
    """
    for fname in fnames:
        try:
            nodes = nrml.read(fname).nodes
        except ValueError as err:
            print(err)
            return
        with open(fname + '.bak', 'w') as f:
            f.write(open(fname).read())
        with open(fname, 'w') as f:
            nrml.write(nodes, f)
        print('Reformatted %s, original left in %s.bak' % (fname, fname))
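Usage is one call per batch of files; a sketch with an illustrative file name.

# reformats the file in place and leaves the original in source_model.xml.bak
tidy(['source_model.xml'])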
Example #25
    def serialize(self, data, fmt='%10.7E'):
        """
        Serialize a collection of ground motion fields to XML.

        :param data:
            An iterable of "GMF set" objects.
            Each "GMF set" object should:

            * have an `investigation_time` attribute
            * have a `stochastic_event_set_id` attribute
            * be iterable, yielding a sequence of "GMF" objects

            Each "GMF" object should:

            * have an `imt` attribute
            * have an `sa_period` attribute (only if `imt` is 'SA')
            * have an `sa_damping` attribute (only if `imt` is 'SA')
            * have a `rupture_id` attribute (to indicate which rupture
              contributed to this gmf)
            * be iterable, yielding a sequence of "GMF node" objects

            Each "GMF node" object should have:

            * a `gmv` attribute (to indicate the ground motion value)
            * `lon` and `lat` attributes (to indicate the geographical location
              of the ground motion field)
        """
        gmf_set_nodes = []
        for gmf_set in data:
            gmf_set_node = node.Node('gmfSet')
            if gmf_set.investigation_time:
                gmf_set_node['investigationTime'] = str(
                    gmf_set.investigation_time)
            gmf_set_node['stochasticEventSetId'] = str(
                gmf_set.stochastic_event_set_id)
            gmf_set_node.nodes = gen_gmfs(gmf_set)
            gmf_set_nodes.append(gmf_set_node)

        gmf_container = node.Node('gmfCollection')
        gmf_container[SM_TREE_PATH] = self.sm_lt_path
        gmf_container[GSIM_TREE_PATH] = self.gsim_lt_path
        gmf_container.nodes = gmf_set_nodes

        with open(self.dest, 'wb') as dest:
            nrml.write([gmf_container], dest, fmt)
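The nested GMF structure described above can be mocked with small iterables; a sketch with invented values (`writer`, `gen_gmfs` and the tree-path attributes are assumed to come from the surrounding module).

from collections import namedtuple

GmfNode = namedtuple('GmfNode', 'gmv lon lat')

class Gmf(list):
    """An iterable of GMF nodes carrying the attributes listed above."""
    def __init__(self, imt, rupture_id, nodes, sa_period=None, sa_damping=None):
        list.__init__(self, nodes)
        self.imt, self.rupture_id = imt, rupture_id
        self.sa_period, self.sa_damping = sa_period, sa_damping

class GmfSet(list):
    """An iterable of GMF objects with set-level attributes."""
    def __init__(self, investigation_time, stochastic_event_set_id, gmfs):
        list.__init__(self, gmfs)
        self.investigation_time = investigation_time
        self.stochastic_event_set_id = stochastic_event_set_id

data = [GmfSet(50.0, 1, [
    Gmf('PGA', 'rup_1', [GmfNode(0.12, 10.0, 45.0), GmfNode(0.09, 10.5, 45.2)]),
])]
# writer.serialize(data)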
Example #26
    def serialize(self, data, fmt='%10.7E'):
        """
        Serialize a collection of ground motion fields to XML.

        :param data:
            An iterable of "GMF set" objects.
            Each "GMF set" object should:

            * have an `investigation_time` attribute
            * have a `stochastic_event_set_id` attribute
            * be iterable, yielding a sequence of "GMF" objects

            Each "GMF" object should:

            * have an `imt` attribute
            * have an `sa_period` attribute (only if `imt` is 'SA')
            * have an `sa_damping` attribute (only if `imt` is 'SA')
            * have a `rupture_id` attribute (to indicate which rupture
              contributed to this gmf)
            * be iterable, yielding a sequence of "GMF node" objects

            Each "GMF node" object should have:

            * a `gmv` attribute (to indicate the ground motion value)
            * `lon` and `lat` attributes (to indicate the geographical location
              of the ground motion field)
        """
        gmf_set_nodes = []
        for gmf_set in data:
            gmf_set_node = node.Node('gmfSet')
            if gmf_set.investigation_time:
                gmf_set_node['investigationTime'] = str(
                    gmf_set.investigation_time)
            gmf_set_node['stochasticEventSetId'] = str(
                gmf_set.stochastic_event_set_id)
            gmf_set_node.nodes = gen_gmfs(gmf_set)
            gmf_set_nodes.append(gmf_set_node)

        gmf_container = node.Node('gmfCollection')
        gmf_container[SM_TREE_PATH] = self.sm_lt_path
        gmf_container[GSIM_TREE_PATH] = self.gsim_lt_path
        gmf_container.nodes = gmf_set_nodes

        with open(self.dest, 'w') as dest:
            nrml.write([gmf_container], dest, fmt)
Example #27
 def convert_to_nrml(self, out_archive=None):
     """
     Convert CSV files with the given prefix to .xml files; if the output
     archive is not specified, use the input archive to store the output.
     """
     fnames = []
     for man in self._getmanagers():
         with man:
             outname = man.prefix + '.xml'
             if out_archive is None:
                 out = man.archive.open(outname, 'w+')
             else:
                 out = out_archive.open(outname, 'w+')
             with out:
                 node = man.get_tableset().to_node()
                 nrml.write([node], out)
             fnames.append(out.name)
     return fnames
Example #28
def tidy(fnames):
    """
    Reformat a NRML file in a canonical form. That also means reducing the
    precision of the floats to a standard value. If the file is invalid,
    a clear error message is shown.
    """
    for fname in fnames:
        try:
            node = nrml.read(fname)
        except ValueError as err:
            print(err)
            return
        with open(fname + '.bak', 'wb') as f:
            f.write(open(fname, 'rb').read())
        with open(fname, 'wb') as f:
            # make sure the xmlns i.e. the NRML version is unchanged
            nrml.write(node.nodes, f, writers.FIVEDIGITS, xmlns=node['xmlns'])
        print('Reformatted %s, original left in %s.bak' % (fname, fname))
Example #29
def write_source_model(dest, sources, name=None):
    """
    Writes a source model to XML.

    :param str dest:
        Destination path
    :param list sources:
        Source model as list of instance of the
        :class:`openquake.hazardlib.source.base.BaseSeismicSource`
    :param str name:
        Name of the source model (if missing, extracted from the filename)
    """
    name = name or os.path.splitext(os.path.basename(dest))[0]
    nodes = list(map(obj_to_node, sorted(sources, key=lambda src: src.source_id)))
    source_model = LiteralNode("sourceModel", {"name": name}, nodes=nodes)
    with open(dest, 'w') as f:
        nrml.write([source_model], f, '%s')
    return dest
Example #30
    def serialize(self, data):
        """
        Serialize hazard map data to XML.

        See :meth:`HazardMapWriter.serialize` for details about the expected
        input.
        """
        with open(self.dest, 'wb') as fh:
            root = et.Element('nrml')
            hazard_map = et.SubElement(root, 'hazardMap')
            _set_metadata(hazard_map, self.metadata, _ATTR_MAP)

            for lon, lat, iml in data:
                node = et.SubElement(hazard_map, 'node')
                node.set('lon', str(lon))
                node.set('lat', str(lat))
                node.set('iml', str(iml))

            nrml.write(list(root), fh)
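As the loop above shows, `data` is just an iterable of (lon, lat, iml) triples; a minimal sketch with invented values.

data = [
    (10.0, 45.0, 0.25),
    (10.5, 45.2, 0.18),
]
# writer.serialize(data)  # assumed instance with `dest` and `metadata` set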
Example #31
    def serialize(self, data):
        """
        Serialize hazard map data to XML.

        See :meth:`HazardMapWriter.serialize` for details about the expected
        input.
        """
        with nrml.NRMLFile(self.dest, 'w') as fh:
            root = et.Element('nrml')
            hazard_map = et.SubElement(root, 'hazardMap')
            _set_metadata(hazard_map, self.metadata, _ATTR_MAP)

            for lon, lat, iml in data:
                node = et.SubElement(hazard_map, 'node')
                node.set('lon', str(lon))
                node.set('lat', str(lat))
                node.set('iml', str(iml))

            nrml.write(list(root), fh)
Example #32
 def write(self, destination, source_model, name=None):
     """
     Exports to NRML
     """
     if os.path.exists(destination):
         os.remove(destination)
     self.destination = destination
     if name:
         source_model.name = name
     output_source_model = Node("sourceModel", {"name": name})
     dic = groupby(source_model.sources,
                   operator.itemgetter('tectonicRegion'))
     for i, (trt, srcs) in enumerate(dic.items(), 1):
         output_source_model.append(
             Node('sourceGroup',
                  {'tectonicRegion': trt, 'name': 'group %d' % i},
                  nodes=srcs))
     print("Exporting Source Model to %s" % self.destination)
     with open(self.destination, "wb") as f:
         nrml.write([output_source_model], f, "%s")
Example #33
def reduce(fname, reduction_factor):
    """
    Produce a submodel from `fname` by sampling the nodes randomly.
    Supports source models, site models and exposure models. As a special
    case, it is also able to reduce .csv files by sampling the lines.
    This is a debugging utility to reduce large computations to small ones.
    """
    if fname.endswith('.csv'):
        with open(fname) as f:
            all_lines = f.readlines()
        lines = random_filter(all_lines, reduction_factor)
        shutil.copy(fname, fname + '.bak')
        print('Copied the original file in %s.bak' % fname)
        with open(fname, 'wb') as f:
            for line in lines:
                f.write(encode(line))
        print('Extracted %d lines out of %d' % (len(lines), len(all_lines)))
        return
    node = nrml.read(fname)
    model = node[0]
    if model.tag.endswith('exposureModel'):
        total = len(model.assets)
        model.assets.nodes = random_filter(model.assets, reduction_factor)
        num_nodes = len(model.assets)
    elif model.tag.endswith('siteModel'):
        total = len(model)
        model.nodes = random_filter(model, reduction_factor)
        num_nodes = len(model)
    elif model.tag.endswith('sourceModel'):
        total = len(model)
        model.nodes = random_filter(model, reduction_factor)
        num_nodes = len(model)
    else:
        raise RuntimeError('Unknown model tag: %s' % model.tag)
    shutil.copy(fname, fname + '.bak')
    print('Copied the original file in %s.bak' % fname)
    with open(fname, 'wb') as f:
        nrml.write([model], f, xmlns=node['xmlns'])
    print('Extracted %d nodes out of %d' % (num_nodes, total))
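A usage sketch with an illustrative file name and factor.

# keep roughly 10% of the nodes; the original is saved as exposure_model.xml.bak
reduce('exposure_model.xml', 0.1)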
Example #34
def reduce(fname, reduction_factor):
    """
    Produce a submodel from `fname` by sampling the nodes randomly.
    Supports source models, site models and exposure models. As a special
    case, it is also able to reduce .csv files by sampling the lines.
    This is a debugging utility to reduce large computations to small ones.
    """
    if fname.endswith('.csv'):
        with open(fname) as f:
            all_lines = f.readlines()
        lines = random_filter(all_lines, reduction_factor)
        shutil.copy(fname, fname + '.bak')
        print('Copied the original file in %s.bak' % fname)
        with open(fname, 'w') as f:
            for line in lines:
                f.write(line)
        print('Extracted %d lines out of %d' % (len(lines), len(all_lines)))
        return
    model, = nrml.read(fname)
    if model.tag.endswith('exposureModel'):
        total = len(model.assets)
        model.assets.nodes = random_filter(model.assets, reduction_factor)
        num_nodes = len(model.assets)
    elif model.tag.endswith('siteModel'):
        total = len(model)
        model.nodes = random_filter(model, reduction_factor)
        num_nodes = len(model)
    elif model.tag.endswith('sourceModel'):
        total = len(model)
        model.nodes = random_filter(model, reduction_factor)
        num_nodes = len(model)
    else:
        raise RuntimeError('Unknown model tag: %s' % model.tag)
    shutil.copy(fname, fname + '.bak')
    print('Copied the original file in %s.bak' % fname)
    with open(fname, 'w') as f:
        nrml.write([model], f)
    print('Extracted %d nodes out of %d' % (num_nodes, total))
Example #35
def selected_event_set_to_rupture_xmls(input_ses, output_dir, selected_ids):
    """
    Parse only ruptures with the selected IDs to the output dir
    """
    if os.path.exists(output_dir):
        raise IOError("Output directory %s already exists" % output_dir)
    else:
        os.mkdir(output_dir)
    nodeset = nrml.read(input_ses, chatty=False)
    for sesc in nodeset:
        for ses in sesc:
            for rupture in ses:
                if rupture["id"] in selected_ids:
                    print "Parsing event %s" % rupture["id"]
                    if hasattr(rupture, "planarSurface"):
                        rupture_node = parse_planar_surface(rupture)
                    elif hasattr(rupture, "mesh"):
                        rupture_node = parse_mesh_surface(rupture)
                    rup_id = rupture["id"].replace("=", "_")
                    filename = os.path.join(output_dir,
                                            rup_id.replace("|", "_") + ".xml")
                    with open(filename, "w") as f:
                        nrml.write([rupture_node], f, "%s")
Example #36
    if autoheader:
        all_fields = [col.split(':', 1)[0].split('-') for col in autoheader]
        for record in data:
            row = []
            for fields in all_fields:
                row.append(extract_from(record, fields))
            dest.write(
                sep.join(scientificformat(col, fmt) for col in row) + u'\n')
    else:
        for row in data:
            dest.write(
                sep.join(scientificformat(col, fmt) for col in row) + u'\n')
    if hasattr(dest, 'getvalue'):
        return dest.getvalue()[:-1]  # a newline is strangely added
    else:
        dest.close()
    return dest.name


if __name__ == '__main__':  # pretty print of NRML files
    import sys
    import shutil
    from openquake.commonlib import nrml
    nrmlfiles = sys.argv[1:]
    for fname in nrmlfiles:
        node = nrml.read(fname)
        shutil.copy(fname, fname + '.bak')
        with open(fname, 'w') as out:
            nrml.write(list(node), out)
Example #37
def fix_source_node(node):
    if node.tag.endswith('complexFaultSource'):
        geom = node.complexFaultGeometry
        top = geom.faultTopEdge
        intermediate = [edge for edge in geom.getnodes('intermediateEdge')]
        bottom = geom.faultBottomEdge
        edges = map(make_edge, [top] + intermediate + [bottom])
        try:
            ComplexFaultSurface.from_fault_data(edges, mesh_spacing=4.)
        except ValueError as excp:
            if AKI_RICH_ERR_MSG in str(excp):
                print(excp)
                print('Reverting edges ...')
                reverse(geom.faultTopEdge)
                reverse(geom.faultBottomEdge)
            elif WRONG_ORDER_ERR_MSG in str(excp):
                print(excp)
                print('reverting bottom edge ...')
                reverse(geom.faultBottomEdge)
            else:
                raise

if __name__ == '__main__':
    fname = sys.argv[1]
    src_model = node_from_xml(fname).sourceModel
    for node in src_model:
        fix_source_node(node)
    with open(fname, 'w') as f:
        nrml.write([src_model], f)
Example #38
def fix_source_node(node):
    if node.tag.endswith('complexFaultSource'):
        geom = node.complexFaultGeometry
        top = geom.faultTopEdge
        intermediate = [edge for edge in geom.getnodes('intermediateEdge')]
        bottom = geom.faultBottomEdge
        edges = map(make_edge, [top] + intermediate + [bottom])
        try:
            ComplexFaultSurface.from_fault_data(edges, mesh_spacing=4.)
        except ValueError as excp:
            if AKI_RICH_ERR_MSG in str(excp):
                print(excp)
                print('Reverting edges ...')
                reverse(geom.faultTopEdge)
                reverse(geom.faultBottomEdge)
            elif WRONG_ORDER_ERR_MSG in str(excp):
                print(excp)
                print('reverting bottom edge ...')
                reverse(geom.faultBottomEdge)
            else:
                raise

if __name__ == '__main__':
    fname = sys.argv[1]
    src_model = node_from_xml(fname).sourceModel
    for src_node in src_model:
        fix_source_node(src_node)
    with open(fname, 'wb') as f:
        nrml.write([src_model], f, xmlns=nrml.NAMESPACE)
Example #39
    def serialize(self, data):
        """
        Serialize a collection of stochastic event sets to XML.

        :param data:
            An iterable of "SES" ("Stochastic Event Set") objects.
            Each "SES" object should:

            * have an `investigation_time` attribute
            * have an `ordinal` attribute
            * be iterable, yielding a sequence of "rupture" objects

            Each rupture" should have the following attributes:
            * `etag`
            * `magnitude`
            * `strike`
            * `dip`
            * `rake`
            * `tectonic_region_type`
            * `is_from_fault_source` (a `bool`)
            * `is_multi_surface` (a `bool`)
            * `lons`
            * `lats`
            * `depths`

            If `is_from_fault_source` is `True`, the rupture originated from a
            simple or complex fault source. In this case, `lons`, `lats`, and
            `depths` should all be 2D arrays (of uniform shape). These
            coordinate triples represent nodes of the rupture mesh.

            If `is_from_fault_source` is `False`, the rupture originated from a
            point or area source. In this case, the rupture is represented by a
            quadrilateral planar surface. This planar surface is defined by 3D
            vertices. In this case, the rupture should have the following
            attributes:

            * `top_left_corner`
            * `top_right_corner`
            * `bottom_right_corner`
            * `bottom_left_corner`

            Each of these should be a triple of `lon`, `lat`, `depth`.

            If `is_multi_surface` is `True`, the rupture originated from a
            multi-surface source. In this case, `lons`, `lats`, and `depths`
            should have uniform length. The length should be a multiple of 4,
            where each segment of 4 represents the corner points of a planar
            surface in the following order:

            * top left
            * top right
            * bottom left
            * bottom right

            Each of these should be a triple of `lon`, `lat`, `depth`.
        """
        with open(self.dest, 'wb') as fh:
            root = et.Element('nrml')
            ses_container = et.SubElement(
                root, 'stochasticEventSetCollection')
            for ses in data:
                ruptures = list(ses)
                if not ruptures:  # empty SES, don't export it
                    continue
                ses_elem = et.SubElement(
                    ses_container, 'stochasticEventSet')
                ses_elem.set('id', str(ses.ordinal or 1))
                ses_elem.set('investigationTime', str(ses.investigation_time))
                for rupture in ruptures:
                    rupture_to_element(rupture, ses_elem)
            nrml.write(list(root), fh)
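A sketch of the input for the planar-surface case described above, with invented values (`writer` and the `rupture_to_element` helper are assumed to come from the surrounding module).

from collections import namedtuple

Rupture = namedtuple(
    'Rupture',
    'etag magnitude strike dip rake tectonic_region_type '
    'is_from_fault_source is_multi_surface '
    'top_left_corner top_right_corner bottom_right_corner bottom_left_corner '
    'lons lats depths')

class SES(list):
    """An iterable of ruptures with the attributes listed above."""
    def __init__(self, ordinal, investigation_time, ruptures):
        list.__init__(self, ruptures)
        self.ordinal = ordinal
        self.investigation_time = investigation_time

rup = Rupture(
    etag='trt=1~ses=1~rup=1', magnitude=6.5, strike=30.0, dip=90.0, rake=0.0,
    tectonic_region_type='Active Shallow Crust',
    is_from_fault_source=False, is_multi_surface=False,
    top_left_corner=(10.0, 45.0, 0.0), top_right_corner=(10.1, 45.0, 0.0),
    bottom_right_corner=(10.1, 45.0, 10.0), bottom_left_corner=(10.0, 45.0, 10.0),
    lons=None, lats=None, depths=None)
data = [SES(ordinal=1, investigation_time=50.0, ruptures=[rup])]
# writer.serialize(data)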
Example #40
    def serialize(self, data):
        """
        Serialize a collection of loss curves.

        :param data:
            An iterable of loss curve objects. Each object should:

            * define an attribute `location`, which is itself an object
              defining two attributes, `x` containing the longitude value
              and `y` containing the latitude value.
            * define an attribute `asset_ref`, which contains the unique
              identifier of the asset related to the loss curve.
            * define an attribute `poes`, which is a list of floats
              describing the probabilities of exceedance.
            * define an attribute `losses`, which is a list of floats
              describing the losses.
            * define an attribute `loss_ratios`, which is a list of floats
              describing the loss ratios.
            * define an attribute `average_loss`, which is a float
              describing the average loss associated to the loss curve
            * define an attribute `stddev_loss`, which is a float
              describing the standard deviation of losses if the loss curve
              has been computed with an event based approach. Otherwise,
              it is None

            All attributes must be defined, except for `loss_ratios` that
            can be `None` since it is optional in the schema.

            Also, `poes`, `losses` and `loss_ratios` values must be indexed
            coherently, i.e.: the loss (and optionally loss ratio) at index
            zero is related to the probability of exceedance at the same
            index.
        """

        _assert_valid_input(data)

        with open(self._dest, 'wb') as output:
            root = et.Element("nrml")

            for curve in data:
                if self._loss_curves is None:
                    self._create_loss_curves_elem(root)

                loss_curve = et.SubElement(self._loss_curves, "lossCurve")

                _append_location(loss_curve, curve.location)
                loss_curve.set("assetRef", curve.asset_ref)

                poes = et.SubElement(loss_curve, "poEs")
                poes.text = " ".join(FIVEDIGITS % p for p in curve.poes
                                     if notnan(p))

                losses = et.SubElement(loss_curve, "losses")
                losses.text = " ".join(FIVEDIGITS % p for p in curve.losses
                                       if notnan(p))

                if curve.loss_ratios is not None:
                    loss_ratios = et.SubElement(loss_curve, "lossRatios")

                    loss_ratios.text = " ".join(
                        ['%.3f' % p for p in curve.loss_ratios if notnan(p)])

                losses = et.SubElement(loss_curve, "averageLoss")
                losses.text = FIVEDIGITS % curve.average_loss

                if curve.stddev_loss is not None:
                    losses = et.SubElement(loss_curve, "stdDevLoss")
                    losses.text = FIVEDIGITS % curve.stddev_loss

            nrml.write(list(root), output)
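A loss curve object per the docstring above, with invented values (`writer` is an assumed instance of the class).

from collections import namedtuple

Location = namedtuple('Location', 'x y')
LossCurve = namedtuple(
    'LossCurve',
    'location asset_ref poes losses loss_ratios average_loss stddev_loss')

curve = LossCurve(
    location=Location(10.0, 45.0), asset_ref='asset_1',
    poes=[0.9, 0.5, 0.1], losses=[100.0, 500.0, 900.0],
    loss_ratios=[0.01, 0.05, 0.09],  # may be None, as noted above
    average_loss=250.0, stddev_loss=None)
# writer.serialize([curve])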
Example #41
    def serialize(self, data):
        """
        Serialize an aggregation loss curve.

        :param data:
            An object representing an aggregate loss curve. This object should:

            * define an attribute `poes`, which is a list of floats
              describing the probabilities of exceedance.
            * define an attribute `losses`, which is a list of floats
              describing the losses.
            * define an attribute `average_loss`, which is a float
              describing the average loss associated to the loss curve
            * define an attribute `stddev_loss`, which is a float
              describing the standard deviation of losses if the loss curve
              has been computed with an event based approach. Otherwise, it
              is None

            Also, `poes` and `losses` values must be indexed coherently,
            i.e.: the loss at index zero is related to the probability
            of exceedance at the same index.
        """

        if data is None:
            raise ValueError("You can not serialize an empty document")

        with open(self._dest, 'wb') as output:
            root = et.Element("nrml")

            aggregate_loss_curve = et.SubElement(root, "aggregateLossCurve")

            aggregate_loss_curve.set("investigationTime",
                                     str(self._investigation_time))

            aggregate_loss_curve.set("riskInvestigationTime",
                                     str(self._risk_investigation_time))

            if self._source_model_tree_path is not None:
                aggregate_loss_curve.set("sourceModelTreePath",
                                         str(self._source_model_tree_path))

            if self._gsim_tree_path is not None:
                aggregate_loss_curve.set("gsimTreePath",
                                         str(self._gsim_tree_path))

            if self._statistics is not None:
                aggregate_loss_curve.set("statistics", str(self._statistics))

            if self._quantile_value is not None:
                aggregate_loss_curve.set("quantileValue",
                                         str(self._quantile_value))

            if self._unit is not None:
                aggregate_loss_curve.set("unit", str(self._unit))

            aggregate_loss_curve.set("lossType", self._loss_type)

            poes = et.SubElement(aggregate_loss_curve, "poEs")
            poes.text = " ".join(FIVEDIGITS % p for p in data.poes)

            losses = et.SubElement(aggregate_loss_curve, "losses")
            losses.text = " ".join([FIVEDIGITS % p for p in data.losses])

            losses = et.SubElement(aggregate_loss_curve, "averageLoss")
            losses.text = FIVEDIGITS % data.average_loss

            if data.stddev_loss is not None:
                losses = et.SubElement(aggregate_loss_curve, "stdDevLoss")
                losses.text = FIVEDIGITS % data.stddev_loss

            nrml.write(list(root), output)
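The aggregate curve is a single object; a sketch with invented values (`writer` is an assumed instance of the class).

from collections import namedtuple

AggCurve = namedtuple('AggCurve', 'poes losses average_loss stddev_loss')

agg = AggCurve(poes=[0.9, 0.5, 0.1],
               losses=[1000.0, 5000.0, 9000.0],
               average_loss=2500.0, stddev_loss=None)
# writer.serialize(agg)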
Example #42
def fix_source_node(node):
    if node.tag.endswith('complexFaultSource'):
        geom = node.complexFaultGeometry
        top = geom.faultTopEdge
        intermediate = [edge for edge in geom.getnodes('intermediateEdge')]
        bottom = geom.faultBottomEdge
        edges = map(make_edge, [top] + intermediate + [bottom])
        try:
            ComplexFaultSurface.from_fault_data(edges, mesh_spacing=4.)
        except ValueError as excp:
            if AKI_RICH_ERR_MSG in str(excp):
                print(excp)
                print('Reverting edges ...')
                reverse(geom.faultTopEdge)
                reverse(geom.faultBottomEdge)
            elif WRONG_ORDER_ERR_MSG in str(excp):
                print(excp)
                print('reverting bottom edge ...')
                reverse(geom.faultBottomEdge)
            else:
                raise

if __name__ == '__main__':
    fname = sys.argv[1]
    src_model = node_from_xml(fname).sourceModel
    for node in src_model:
        fix_source_node(node)
    with open(fname, 'w') as f:
        nrml.write([src_model], f)
Example #43
# this is simple and without error checking for the moment
def read_array(fname, sep=','):
    r"""
    Convert a CSV file without header into a numpy array of floats.

    >>> from openquake.baselib.general import writetmp
    >>> print(read_array(writetmp('.1 .2, .3 .4, .5 .6\n')))
    [[[ 0.1  0.2]
      [ 0.3  0.4]
      [ 0.5  0.6]]]
    """
    with open(fname) as f:
        records = []
        for line in f:
            row = line.split(sep)
            record = [list(map(float, col.split())) for col in row]
            records.append(record)
        return numpy.array(records)


if __name__ == '__main__':  # pretty print of NRML files
    import sys
    import shutil
    from openquake.commonlib import nrml
    nrmlfiles = sys.argv[1:]
    for fname in nrmlfiles:
        node = nrml.read(fname)
        shutil.copy(fname, fname + '.bak')
        with open(fname, 'w') as out:
            nrml.write(list(node), out)
Example #44
    def serialize(self, data):
        """
        Serialize a collection of loss curves.

        :param data:
            An iterable of loss curve objects. Each object should:

            * define an attribute `location`, which is itself an object
              defining two attributes, `x` containing the longitude value
              and `y` containing the latitude value.
            * define an attribute `asset_ref`, which contains the unique
              identifier of the asset related to the loss curve.
            * define an attribute `poes`, which is a list of floats
              describing the probabilities of exceedance.
            * define an attribute `losses`, which is a list of floats
              describing the losses.
            * define an attribute `loss_ratios`, which is a list of floats
              describing the loss ratios.
            * define an attribute `average_loss`, which is a float
              describing the average loss associated with the loss curve
            * define an attribute `stddev_loss`, which is a float
              describing the standard deviation of losses if the loss curve
              has been computed with an event-based approach; otherwise,
              it is `None`

            All attributes must be defined, except for `loss_ratios`, which
            can be `None` since it is optional in the schema.

            Also, the `poes`, `losses` and `loss_ratios` values must be
            indexed coherently, i.e. the loss (and optionally the loss ratio)
            at index zero is related to the probability of exceedance at the
            same index.
        """

        _assert_valid_input(data)

        with NRMLFile(self._dest, 'w') as output:
            root = et.Element("nrml")

            for curve in data:
                if self._loss_curves is None:
                    self._create_loss_curves_elem(root)

                loss_curve = et.SubElement(self._loss_curves, "lossCurve")

                _append_location(loss_curve, curve.location)
                loss_curve.set("assetRef", curve.asset_ref)

                poes = et.SubElement(loss_curve, "poEs")
                poes.text = " ".join(FIVEDIGITS % p for p in curve.poes
                                     if notnan(p))

                losses = et.SubElement(loss_curve, "losses")
                losses.text = " ".join(FIVEDIGITS % p for p in curve.losses
                                       if notnan(p))

                if curve.loss_ratios is not None:
                    loss_ratios = et.SubElement(loss_curve, "lossRatios")

                    loss_ratios.text = " ".join(
                        ['%.3f' % p for p in curve.loss_ratios if notnan(p)])

                losses = et.SubElement(loss_curve, "averageLoss")
                losses.text = FIVEDIGITS % curve.average_loss

                if curve.stddev_loss is not None:
                    losses = et.SubElement(loss_curve, "stdDevLoss")
                    losses.text = FIVEDIGITS % curve.stddev_loss

            nrml.write(list(root), output)
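
A hedged usage sketch for this writer: the items passed to `serialize` only need the attributes listed in the docstring, so plain namespaces are enough. The `writer` name below stands for an already constructed instance of this serializer (its constructor and destination handling are not part of the snippet).

from types import SimpleNamespace

curve = SimpleNamespace(
    location=SimpleNamespace(x=9.15, y=45.17),   # lon, lat
    asset_ref='a1',
    poes=[0.98, 0.50, 0.02],
    losses=[0.0, 120.5, 1800.0],
    loss_ratios=None,     # optional in the schema
    average_loss=350.0,
    stddev_loss=None)     # None unless computed with an event-based approach

writer.serialize([curve])   # `writer`: assumed instance of this serializer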
Example #45
0
    def serialize(self, data):
        """
        Serialize a collection of stochastic event sets to XML.

        :param data:
            An iterable of "SES" ("Stochastic Event Set") objects.
            Each "SES" object should:

            * have an `investigation_time` attribute
            * have an `ordinal` attribute
            * be iterable, yielding a sequence of "rupture" objects

            Each rupture" should have the following attributes:
            * `tag`
            * `magnitude`
            * `strike`
            * `dip`
            * `rake`
            * `tectonic_region_type`
            * `is_from_fault_source` (a `bool`)
            * `is_multi_surface` (a `bool`)
            * `lons`
            * `lats`
            * `depths`

            If `is_from_fault_source` is `True`, the rupture originated from a
            simple or complex fault source. In this case, `lons`, `lats`, and
            `depths` should all be 2D arrays (of uniform shape). These
            coordinate triples represent nodes of the rupture mesh.

            If `is_from_fault_source` is `False`, the rupture originated from a
            point or area source. In this case, the rupture is represented by a
            quadrilateral planar surface. This planar surface is defined by 3D
            vertices. In this case, the rupture should have the following
            attributes:

            * `top_left_corner`
            * `top_right_corner`
            * `bottom_right_corner`
            * `bottom_left_corner`

            Each of these should be a triple of `lon`, `lat`, `depth`.

            If `is_multi_surface` is `True`, the rupture originated from a
            multi-surface source. In this case, `lons`, `lats`, and `depths`
            should have uniform length. The length should be a multiple of 4,
            where each segment of 4 represents the corner points of a planar
            surface in the following order:

            * top left
            * top right
            * bottom left
            * bottom right

            Each of these should be a triple of `lon`, `lat`, `depth`.
        """
        with nrml.NRMLFile(self.dest, 'w') as fh:
            root = et.Element('nrml')
            ses_container = et.SubElement(root, 'stochasticEventSetCollection')
            ses_container.set(SM_TREE_PATH, self.sm_lt_path)
            for ses in data:
                ruptures = list(ses)
                if not ruptures:  # empty SES, don't export it
                    continue
                ses_elem = et.SubElement(ses_container, 'stochasticEventSet')
                ses_elem.set('id', str(ses.ordinal or 1))
                ses_elem.set('investigationTime', str(ses.investigation_time))
                for rupture in ruptures:
                    rupture_to_element(rupture, ses_elem)

            nrml.write(list(root), fh)
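
A minimal hedged sketch of the input this writer expects, limited to the planar-surface case (`is_from_fault_source` and `is_multi_surface` both `False`); the corner attributes are (lon, lat, depth) triples as described above, and `writer` again stands for an instance of this serializer built elsewhere.

from types import SimpleNamespace

rupture = SimpleNamespace(
    tag='ses-1|rup-1', magnitude=6.5, strike=0.0, dip=90.0, rake=0.0,
    tectonic_region_type='Active Shallow Crust',
    is_from_fault_source=False, is_multi_surface=False,
    lons=None, lats=None, depths=None,   # assumed unused in the planar case
    top_left_corner=(0.0, 0.1, 5.0), top_right_corner=(0.1, 0.1, 5.0),
    bottom_right_corner=(0.1, 0.0, 10.0), bottom_left_corner=(0.0, 0.0, 10.0))


class FakeSES(list):
    """Hypothetical SES: iterable over its ruptures, with the two
    attributes required by the docstring."""
    def __init__(self, ruptures, investigation_time, ordinal):
        super().__init__(ruptures)
        self.investigation_time = investigation_time
        self.ordinal = ordinal


# `writer`: assumed instance of this serializer, created elsewhere
writer.serialize([FakeSES([rupture], investigation_time=50.0, ordinal=1)])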
Example #46
0
    def serialize(self, data):
        """
        Serialize an aggregation loss curve.

        :param data:
            An object representing an aggregate loss curve. This object should:

            * define an attribute `poes`, which is a list of floats
              describing the probabilities of exceedance.
            * define an attribute `losses`, which is a list of floats
              describing the losses.
            * define an attribute `average_loss`, which is a float
              describing the average loss associated with the loss curve
            * define an attribute `stddev_loss`, which is a float
              describing the standard deviation of losses if the loss curve
              has been computed with an event-based approach; otherwise, it
              is `None`

            Also, the `poes` and `losses` values must be indexed coherently,
            i.e. the loss at index zero is related to the probability
            of exceedance at the same index.
        """

        if data is None:
            raise ValueError("You can not serialize an empty document")

        with NRMLFile(self._dest, 'wb') as output:
            root = et.Element("nrml")

            aggregate_loss_curve = et.SubElement(root, "aggregateLossCurve")

            aggregate_loss_curve.set("investigationTime",
                                     str(self._investigation_time))

            aggregate_loss_curve.set("riskInvestigationTime",
                                     str(self._risk_investigation_time))

            if self._source_model_tree_path is not None:
                aggregate_loss_curve.set("sourceModelTreePath",
                                         str(self._source_model_tree_path))

            if self._gsim_tree_path is not None:
                aggregate_loss_curve.set("gsimTreePath",
                                         str(self._gsim_tree_path))

            if self._statistics is not None:
                aggregate_loss_curve.set("statistics", str(self._statistics))

            if self._quantile_value is not None:
                aggregate_loss_curve.set("quantileValue",
                                         str(self._quantile_value))

            if self._unit is not None:
                aggregate_loss_curve.set("unit", str(self._unit))

            aggregate_loss_curve.set("lossType", self._loss_type)

            poes = et.SubElement(aggregate_loss_curve, "poEs")
            poes.text = " ".join(FIVEDIGITS % p for p in data.poes)

            losses = et.SubElement(aggregate_loss_curve, "losses")
            losses.text = " ".join([FIVEDIGITS % p for p in data.losses])

            losses = et.SubElement(aggregate_loss_curve, "averageLoss")
            losses.text = FIVEDIGITS % data.average_loss

            if data.stddev_loss is not None:
                losses = et.SubElement(aggregate_loss_curve, "stdDevLoss")
                losses.text = FIVEDIGITS % data.stddev_loss

            nrml.write(list(root), output)
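
As for the per-asset loss curves in the previous example, a hedged usage sketch: the single `data` object only needs the four attributes from the docstring, and `writer` stands for an already constructed instance of this serializer.

from types import SimpleNamespace

agg_curve = SimpleNamespace(
    poes=[0.99, 0.60, 0.10],
    losses=[0.0, 2500.0, 45000.0],
    average_loss=6200.0,
    stddev_loss=None)   # None unless computed with an event-based approach

writer.serialize(agg_curve)   # `writer`: assumed instance of this serializer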