def serialize(self, data):
    """
    Serialize hazard map data to XML.

    See :meth:`HazardMapWriter.serialize` for details about the expected
    input.
    """
    with NRMLFile(self.dest, 'w') as fh:
        root = etree.Element('nrml',
                             nsmap=openquake.nrmllib.SERIALIZE_NS_MAP)

        hazard_map = etree.SubElement(root, 'hazardMap')
        _set_metadata(hazard_map, self.metadata, _ATTR_MAP)

        for lon, lat, iml in data:
            node = etree.SubElement(hazard_map, 'node')
            node.set('lon', str(lon))
            node.set('lat', str(lat))
            node.set('iml', str(iml))

        fh.write(
            etree.tostring(root, pretty_print=True, xml_declaration=True,
                           encoding='UTF-8'))
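
# Hypothetical usage sketch (assumption, not library code): the hazard map
# writers expect `data` to be an iterable of (lon, lat, iml) triples. The
# writer class name, file path and metadata keywords below are illustrative
# assumptions about how this method is typically invoked.
hazard_map_data = [
    (-122.0, 38.0, 0.435),   # lon, lat, intensity measure level
    (-121.5, 38.2, 0.327),
]
# writer = HazardMapXMLWriter('/tmp/hazard_map.xml', **metadata)
# writer.serialize(hazard_map_data)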
def serialize(self, src_model):
    """
    Write a source model to the target file.

    :param src_model:
        A :class:`openquake.nrmllib.models.SourceModel` object, which is an
        iterable collection of sources.
    """
    with NRMLFile(self.dest, 'w') as fh:
        root = etree.Element('nrml',
                             nsmap=openquake.nrmllib.SERIALIZE_NS_MAP)
        src_model_elem = etree.SubElement(root, 'sourceModel')
        if src_model.name is not None:
            src_model_elem.set('name', src_model.name)

        for src in src_model:
            if isinstance(src, models.AreaSource):
                self._append_area(src_model_elem, src)
            elif isinstance(src, models.PointSource):
                self._append_point(src_model_elem, src)
            elif isinstance(src, models.ComplexFaultSource):
                self._append_complex(src_model_elem, src)
            elif isinstance(src, models.SimpleFaultSource):
                self._append_simple(src_model_elem, src)
            elif isinstance(src, models.CharacteristicSource):
                self._append_characteristic(src_model_elem, src)

        fh.write(
            etree.tostring(root, pretty_print=True, xml_declaration=True,
                           encoding='UTF-8'))
def serialize(self, data):
    """
    Serialize loss map data to a file as a GeoJSON feature collection.

    See :meth:`LossMapWriter.serialize` for expected input.
    """
    _assert_valid_input(data)

    feature_coll = {
        'type': 'FeatureCollection',
        'features': [],
        'oqtype': 'LossMap',
        # TODO(LB): should we instead use the
        # openquake.nrmllib.__version__?
        'oqnrmlversion': '0.4',
        'oqmetadata': self._create_oqmetadata(),
    }

    for loss in data:
        loc = loss.location

        loss_node = self._loss_nodes.get(loc.wkt)
        if loss_node is None:
            loss_node = {
                'type': 'Feature',
                'geometry': {
                    'type': 'Point',
                    'coordinates': [float(loc.x), float(loc.y)]
                },
                'properties': {'losses': []},
            }
            self._loss_nodes[loss.location.wkt] = loss_node
            feature_coll['features'].append(loss_node)

        if loss.std_dev is not None:
            loss_node['properties']['losses'].append({
                'assetRef': str(loss.asset_ref),
                'mean': str(loss.value),
                'stdDev': str(loss.std_dev),
            })
        else:
            loss_node['properties']['losses'].append({
                'assetRef': str(loss.asset_ref),
                'value': str(loss.value),
            })

    with NRMLFile(self._dest, 'w') as fh:
        fh.write(json.dumps(feature_coll))
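
# Hypothetical usage sketch (assumption, not library code): the loss map
# writers in this module expect `data` to be an iterable of objects with a
# `location` (exposing `x`, `y` and `wkt`), plus `asset_ref`, `value` and
# `std_dev` attributes. The mock classes and the commented writer call are
# illustrative only.
from collections import namedtuple

_Loc = namedtuple('_Loc', 'x y wkt')
_Loss = namedtuple('_Loss', 'location asset_ref value std_dev')

loss_map_data = [
    _Loss(_Loc(-122.0, 38.0, 'POINT(-122.0 38.0)'), 'a1', 15.5, None),
    _Loss(_Loc(-122.0, 38.0, 'POINT(-122.0 38.0)'), 'a2', 9.2, 1.3),
]
# writer = LossMapGeoJSONWriter('/tmp/loss_map.geojson', **metadata)
# writer.serialize(loss_map_data)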
def serialize(self, data):
    """
    Write the hazard curves to the destination file as GeoJSON. The
    GeoJSON format is customized to contain various bits of metadata.

    See :meth:`HazardCurveXMLWriter.serialize` for expected input.
    """
    oqmetadata = {}
    for key, value in self.metadata.iteritems():
        if key == 'imls':
            oqmetadata['IMLs'] = value
        elif value is not None:
            oqmetadata[_ATTR_MAP.get(key)] = str(value)

    features = []
    feature_coll = {
        'type': 'FeatureCollection',
        'features': features,
        'oqtype': 'HazardCurve',
        'oqnrmlversion': '0.4',
        'oqmetadata': oqmetadata,
    }
    for hc in data:
        poes = list(hc.poes)
        lon = hc.location.x
        lat = hc.location.y

        feature = {
            'type': 'Feature',
            'geometry': {
                'type': 'Point',
                'coordinates': [float(lon), float(lat)],
            },
            'properties': {'poEs': poes},
        }
        features.append(feature)

    with NRMLFile(self.dest, 'w') as fh:
        json.dump(feature_coll, fh, sort_keys=True, indent=4,
                  separators=(',', ': '))
def serialize(self, data):
    """
    Serialize loss map data to XML.

    See :meth:`LossMapWriter.serialize` for expected input.
    """
    _assert_valid_input(data)

    with NRMLFile(self._dest, 'w') as output:
        root = etree.Element("nrml",
                             nsmap=openquake.nrmllib.SERIALIZE_NS_MAP)

        for loss in data:
            if self._loss_map is None:
                self._create_loss_map_elem(root)

            # FIXME(lp). This implementation clearly implies that
            # all the map data will be stored into memory. So, it
            # will never scale well
            loss_node = self._loss_nodes.get(loss.location.wkt)
            if loss_node is None:
                loss_node = etree.SubElement(self._loss_map, "node")
                _append_location(loss_node, loss.location)
                self._loss_nodes[loss.location.wkt] = loss_node

            loss_elem = etree.SubElement(loss_node, "loss")
            loss_elem.set("assetRef", str(loss.asset_ref))

            if loss.std_dev is not None:
                loss_elem.set("mean", str(loss.value))
                loss_elem.set("stdDev", str(loss.std_dev))
            else:
                loss_elem.set("value", str(loss.value))

        output.write(
            etree.tostring(root, pretty_print=True, xml_declaration=True,
                           encoding="UTF-8"))
def serialize(self, data):
    """
    Serialize hazard map data to GeoJSON.

    See :meth:`HazardMapWriter.serialize` for details about the expected
    input.
    """
    oqmetadata = {}
    for key, value in self.metadata.iteritems():
        if value is not None:
            oqmetadata[_ATTR_MAP.get(key)] = str(value)

    features = []
    feature_coll = {
        'type': 'FeatureCollection',
        'features': features,
        'oqtype': 'HazardMap',
        'oqnrmlversion': '0.4',
        'oqmetadata': oqmetadata,
    }
    for lon, lat, iml in data:
        feature = {
            'type': 'Feature',
            'geometry': {
                'type': 'Point',
                'coordinates': [float(lon), float(lat)],
            },
            'properties': {'iml': float(iml)},
        }
        features.append(feature)

    with NRMLFile(self.dest, 'w') as fh:
        json.dump(feature_coll, fh, sort_keys=True, indent=4,
                  separators=(',', ': '))
def serialize(self, data):
    """
    Write a sequence of uniform hazard spectra to the specified file.

    :param data:
        Iterable of UHS data. Each datum must be an object with the
        following attributes:

        * imls: A sequence of Intensity Measure Levels
        * location: An object representing the location of the curve; must
          have `x` and `y` to represent lon and lat, respectively.
    """
    gml_ns = openquake.nrmllib.SERIALIZE_NS_MAP['gml']

    with NRMLFile(self.dest, 'w') as fh:
        root = etree.Element('nrml',
                             nsmap=openquake.nrmllib.SERIALIZE_NS_MAP)

        uh_spectra = etree.SubElement(root, 'uniformHazardSpectra')

        _set_metadata(uh_spectra, self.metadata, _ATTR_MAP)

        periods_elem = etree.SubElement(uh_spectra, 'periods')
        periods_elem.text = ' '.join(
            [str(x) for x in self.metadata['periods']])

        for uhs in data:
            uhs_elem = etree.SubElement(uh_spectra, 'uhs')
            gml_point = etree.SubElement(uhs_elem, '{%s}Point' % gml_ns)
            gml_pos = etree.SubElement(gml_point, '{%s}pos' % gml_ns)
            gml_pos.text = '%s %s' % (uhs.location.x, uhs.location.y)
            imls_elem = etree.SubElement(uhs_elem, 'IMLs')
            imls_elem.text = ' '.join([str(x) for x in uhs.imls])

        fh.write(
            etree.tostring(root, pretty_print=True, xml_declaration=True,
                           encoding='UTF-8'))
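
# Hypothetical usage sketch (assumption, not library code): each UHS datum
# only needs `imls` and a `location` exposing `x`/`y`. The mock classes and
# the commented writer call are illustrative only; the real writer also
# needs a 'periods' entry in its metadata.
from collections import namedtuple

_Loc = namedtuple('_Loc', 'x y')
_UHS = namedtuple('_UHS', 'imls location')

uhs_data = [
    _UHS([0.3, 0.5, 0.2], _Loc(-122.0, 38.0)),
    _UHS([0.4, 0.6, 0.3], _Loc(-121.5, 38.2)),
]
# writer = UHSXMLWriter('/tmp/uhs.xml', **metadata)
# writer.serialize(uhs_data)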
def serialize(self, curve_set):
    """
    Write a set of sequences of hazard curves to the specified file.

    :param curve_set:
        Iterable over sequences of curves. Each element returned by the
        iterable is itself an iterable suitable to be used by the
        :meth:`serialize` method of the class
        :class:`openquake.nrmllib.hazard.writers.HazardCurveXMLWriter`.
    """
    with NRMLFile(self.dest, 'w') as fh:
        root = etree.Element('nrml',
                             nsmap=openquake.nrmllib.SERIALIZE_NS_MAP)

        for metadata, curve_data in zip(self.metadata_set, curve_set):
            writer = HazardCurveXMLWriter(self.dest, **metadata)
            writer.add_hazard_curves(root, metadata, curve_data)

        fh.write(
            etree.tostring(root, pretty_print=True, xml_declaration=True,
                           encoding='UTF-8'))
def serialize(self, data):
    """
    Write a sequence of hazard curves to the specified file.

    :param data:
        Iterable of hazard curve data. Each datum must be an object with
        the following attributes:

        * poes: A list of probability of exceedance values (floats).
        * location: An object representing the location of the curve; must
          have `x` and `y` to represent lon and lat, respectively.
    """
    with NRMLFile(self.dest, 'w') as fh:
        root = etree.Element('nrml',
                             nsmap=openquake.nrmllib.SERIALIZE_NS_MAP)
        self.add_hazard_curves(root, self.metadata, data)
        fh.write(
            etree.tostring(root, pretty_print=True, xml_declaration=True,
                           encoding='UTF-8'))
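
# Hypothetical usage sketch (assumption, not library code): each hazard
# curve datum carries `poes` and a `location` with `x`/`y`. The mock
# classes, file path and metadata keywords are illustrative only.
from collections import namedtuple

_Loc = namedtuple('_Loc', 'x y')
_Curve = namedtuple('_Curve', 'poes location')

curve_data = [
    _Curve([0.1, 0.05, 0.01], _Loc(-122.0, 38.0)),
    _Curve([0.2, 0.08, 0.02], _Loc(-121.5, 38.2)),
]
# writer = HazardCurveXMLWriter('/tmp/curves.xml', **metadata)
# writer.serialize(curve_data)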
def serialize(self, data):
    """
    Serialize loss map data to a file as a GeoJSON feature collection.

    See :meth:`LossMapWriter.serialize` for expected input.
    """
    _assert_valid_input(data)

    feature_coll = {
        'type': 'FeatureCollection',
        'features': [],
        'oqtype': 'LossMap',
        # TODO(LB): should we instead use the
        # openquake.nrmllib.__version__?
        'oqnrmlversion': '0.4',
        'oqmetadata': self._create_oqmetadata(),
    }

    for loss in data:
        loc = loss.location

        loss_node = {
            'type': 'Feature',
            'geometry': {
                'type': 'Point',
                'coordinates': [float(loc.x), float(loc.y)]
            },
            'properties': {'loss': float(loss.value),
                           'asset_ref': loss.asset_ref},
        }
        feature_coll['features'].append(loss_node)

        if loss.std_dev is not None:
            loss_node['properties']['std_dev'] = float(loss.std_dev)

    with NRMLFile(self._dest, 'w') as fh:
        json.dump(feature_coll, fh, sort_keys=True, indent=4,
                  separators=(',', ': '))
def serialize(self, data):
    """
    Serialize loss map data to XML.

    See :meth:`LossMapWriter.serialize` for expected input.
    """
    _assert_valid_input(data)

    with NRMLFile(self._dest, 'w') as output:
        root = etree.Element("nrml",
                             nsmap=openquake.nrmllib.SERIALIZE_NS_MAP)

        loss_map_el = self._create_loss_map_elem(root)

        current_location = None
        current_node = None
        for loss in data:
            if (current_location is None
                    or loss.location.wkt != current_location):
                current_node = etree.SubElement(loss_map_el, "node")
                current_location = _append_location(
                    current_node, loss.location)

            loss_elem = etree.SubElement(current_node, "loss")
            loss_elem.set("assetRef", str(loss.asset_ref))

            if loss.std_dev is not None:
                loss_elem.set("mean", str(loss.value))
                loss_elem.set("stdDev", str(loss.std_dev))
            else:
                loss_elem.set("value", str(loss.value))

        output.write(etree.tostring(
            root, pretty_print=True, xml_declaration=True,
            encoding="UTF-8"))
def serialize(self, data):
    """
    Serialize a collection of loss curves.

    :param data:
        An iterable of loss curve objects. Each object should:

        * define an attribute `location`, which is itself an object
          defining two attributes, `x` containing the longitude value
          and `y` containing the latitude value.
        * define an attribute `asset_ref`, which contains the unique
          identifier of the asset related to the loss curve.
        * define an attribute `poes`, which is a list of floats
          describing the probabilities of exceedance.
        * define an attribute `losses`, which is a list of floats
          describing the losses.
        * define an attribute `loss_ratios`, which is a list of floats
          describing the loss ratios.
        * define an attribute `average_loss`, which is a float
          describing the average loss associated with the loss curve.
        * define an attribute `stddev_loss`, which is a float describing
          the standard deviation of losses if the loss curve has been
          computed with an event based approach. Otherwise, it is None.

        All attributes must be defined, except for `loss_ratios`, which
        can be `None` since it is optional in the schema.

        Also, `poes`, `losses` and `loss_ratios` values must be indexed
        coherently, i.e. the loss (and optionally loss ratio) at index
        zero is related to the probability of exceedance at the same
        index.
    """
    _assert_valid_input(data)

    with NRMLFile(self._dest, 'w') as output:
        root = etree.Element("nrml",
                             nsmap=openquake.nrmllib.SERIALIZE_NS_MAP)

        for curve in data:
            if self._loss_curves is None:
                self._create_loss_curves_elem(root)

            loss_curve = etree.SubElement(self._loss_curves, "lossCurve")

            _append_location(loss_curve, curve.location)
            loss_curve.set("assetRef", curve.asset_ref)

            poes = etree.SubElement(loss_curve, "poEs")
            poes.text = " ".join([str(p) for p in curve.poes])

            losses = etree.SubElement(loss_curve, "losses")
            losses.text = " ".join([str(p) for p in curve.losses])

            if curve.loss_ratios is not None:
                loss_ratios = etree.SubElement(loss_curve, "lossRatios")
                loss_ratios.text = " ".join(
                    [str(p) for p in curve.loss_ratios])

            losses = etree.SubElement(loss_curve, "averageLoss")
            losses.text = "%.4e" % curve.average_loss

            if curve.stddev_loss is not None:
                losses = etree.SubElement(loss_curve, "stdDevLoss")
                losses.text = "%.4e" % curve.stddev_loss

        output.write(etree.tostring(
            root, pretty_print=True, xml_declaration=True,
            encoding="UTF-8"))
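
# Hypothetical usage sketch (assumption, not library code): a loss curve
# object needs location, asset_ref, poes, losses, loss_ratios (or None),
# average_loss and stddev_loss (or None). The mock classes and the
# commented writer call are illustrative only.
from collections import namedtuple

_Loc = namedtuple('_Loc', 'x y')
_LossCurve = namedtuple(
    '_LossCurve',
    'location asset_ref poes losses loss_ratios average_loss stddev_loss')

loss_curves = [
    _LossCurve(_Loc(-122.0, 38.0), 'a1',
               poes=[0.1, 0.05, 0.01], losses=[10.0, 50.0, 90.0],
               loss_ratios=None, average_loss=12.3, stddev_loss=None),
]
# writer = LossCurveXMLWriter('/tmp/loss_curves.xml', **metadata)
# writer.serialize(loss_curves)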
def serialize(self, total_fractions, locations_fractions):
    """
    Actually serialize the fractions.

    :param dict total_fractions:
        maps a value of `variable` to a tuple representing the absolute
        losses and the fraction
    :param dict locations_fractions:
        a dictionary mapping a tuple (longitude, latitude) to bins. Each
        bin is a dictionary with the same structure as `total_fractions`.
    """

    def write_bins(parent, bin_data):
        for value, (absolute_loss, fraction) in bin_data.items():
            bin_element = etree.SubElement(parent, "bin")
            bin_element.set("value", str(value))
            bin_element.set("absoluteLoss", "%.4e" % absolute_loss)
            bin_element.set("fraction", "%.5f" % fraction)

    with NRMLFile(self.dest, 'w') as output:
        root = etree.Element(
            "nrml", nsmap=openquake.nrmllib.SERIALIZE_NS_MAP)

        # container element
        container = etree.SubElement(root, "lossFraction")
        container.set("investigationTime",
                      "%.2f" % self.hazard_metadata.investigation_time)

        if self.poe is not None:
            container.set("poE", "%.4f" % self.poe)

        container.set(
            "sourceModelTreePath", self.hazard_metadata.sm_path or "")
        container.set("gsimTreePath", self.hazard_metadata.gsim_path or "")
        if self.hazard_metadata.statistics is not None:
            container.set("statistics", self.hazard_metadata.statistics)
        if self.hazard_metadata.quantile is not None:
            container.set(
                "quantileValue", "%.4f" % self.hazard_metadata.quantile)
        container.set("lossCategory", self.loss_category)
        container.set("unit", self.loss_unit)
        container.set("variable", self.variable)
        container.set("lossType", self.loss_type)

        # total fractions
        total = etree.SubElement(container, "total")
        write_bins(total, total_fractions)

        # map
        map_element = etree.SubElement(container, "map")

        for lonlat, bin_data in locations_fractions.iteritems():
            node_element = etree.SubElement(map_element, "node")
            node_element.set("lon", str(lonlat[0]))
            node_element.set("lat", str(lonlat[1]))

            write_bins(node_element, bin_data)

        output.write(etree.tostring(
            root, pretty_print=True, xml_declaration=True,
            encoding="UTF-8"))
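
# Hypothetical usage sketch (assumption, not library code): both arguments
# are plain dictionaries whose values are (absolute_loss, fraction) tuples.
# The taxonomy keys, coordinates and the commented writer call are
# illustrative only.
total_fractions = {
    'RC': (1200.0, 0.6),
    'W': (800.0, 0.4),
}
locations_fractions = {
    (-122.0, 38.0): {'RC': (700.0, 0.7), 'W': (300.0, 0.3)},
    (-121.5, 38.2): {'RC': (500.0, 0.5), 'W': (500.0, 0.5)},
}
# writer = LossFractionsWriter(...)  # constructor arguments omitted here
# writer.serialize(total_fractions, locations_fractions)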
def serialize(self, data):
    """
    Serialize an aggregate loss curve.

    :param data:
        An object representing an aggregate loss curve. This object
        should:

        * define an attribute `poes`, which is a list of floats
          describing the probabilities of exceedance.
        * define an attribute `losses`, which is a list of floats
          describing the losses.
        * define an attribute `average_loss`, which is a float
          describing the average loss associated with the loss curve.
        * define an attribute `stddev_loss`, which is a float describing
          the standard deviation of losses if the loss curve has been
          computed with an event based approach. Otherwise, it is None.

        Also, `poes` and `losses` values must be indexed coherently,
        i.e. the loss at index zero is related to the probability of
        exceedance at the same index.
    """
    if data is None:
        raise ValueError("You cannot serialize an empty document")

    with NRMLFile(self._dest, 'w') as output:
        root = etree.Element("nrml",
                             nsmap=openquake.nrmllib.SERIALIZE_NS_MAP)

        aggregate_loss_curve = etree.SubElement(root, "aggregateLossCurve")

        aggregate_loss_curve.set("investigationTime",
                                 str(self._investigation_time))

        if self._source_model_tree_path is not None:
            aggregate_loss_curve.set("sourceModelTreePath",
                                     str(self._source_model_tree_path))

        if self._gsim_tree_path is not None:
            aggregate_loss_curve.set("gsimTreePath",
                                     str(self._gsim_tree_path))

        if self._statistics is not None:
            aggregate_loss_curve.set("statistics", str(self._statistics))

        if self._quantile_value is not None:
            aggregate_loss_curve.set("quantileValue",
                                     str(self._quantile_value))

        if self._unit is not None:
            aggregate_loss_curve.set("unit", str(self._unit))

        aggregate_loss_curve.set("lossType", self._loss_type)

        poes = etree.SubElement(aggregate_loss_curve, "poEs")
        poes.text = " ".join([str(p) for p in data.poes])

        losses = etree.SubElement(aggregate_loss_curve, "losses")
        losses.text = " ".join(["%.4f" % p for p in data.losses])

        losses = etree.SubElement(aggregate_loss_curve, "averageLoss")
        losses.text = "%.4e" % data.average_loss

        if data.stddev_loss is not None:
            losses = etree.SubElement(aggregate_loss_curve, "stdDevLoss")
            losses.text = "%.4e" % data.stddev_loss

        output.write(etree.tostring(
            root, pretty_print=True, xml_declaration=True,
            encoding="UTF-8"))
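
# Hypothetical usage sketch (assumption, not library code): a single object
# with poes, losses, average_loss and stddev_loss is enough. The mock class
# and the commented writer call are illustrative only.
from collections import namedtuple

_AggCurve = namedtuple('_AggCurve', 'poes losses average_loss stddev_loss')

agg_curve = _AggCurve(poes=[0.1, 0.05, 0.01],
                      losses=[100.0, 500.0, 900.0],
                      average_loss=150.0, stddev_loss=None)
# writer = AggregateLossCurveXMLWriter('/tmp/agg_curve.xml', **metadata)
# writer.serialize(agg_curve)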
def serialize(self, data):
    """
    :param data:
        A sequence of data where each datum has the following attributes:

        * matrix: N-dimensional numpy array containing the disaggregation
          histogram.
        * dim_labels: A list of strings which label the dimensions of a
          given histogram. For example, for a Magnitude-Distance-Epsilon
          histogram, we would expect `dim_labels` to be
          ``['Mag', 'Dist', 'Eps']``.
        * poe: The disaggregation Probability of Exceedance level for which
          these results were produced.
        * iml: Intensity measure level, interpolated from the source hazard
          curve at the given ``poe``.
    """
    with NRMLFile(self.dest, 'w') as fh:
        root = etree.Element('nrml',
                             nsmap=openquake.nrmllib.SERIALIZE_NS_MAP)

        diss_matrices = etree.SubElement(root, 'disaggMatrices')

        _set_metadata(diss_matrices, self.metadata, _ATTR_MAP)

        transform = lambda val: ', '.join([str(x) for x in val])
        _set_metadata(diss_matrices, self.metadata, self.BIN_EDGE_ATTR_MAP,
                      transform=transform)

        for result in data:
            diss_matrix = etree.SubElement(diss_matrices, 'disaggMatrix')

            # Check that we have bin edges defined for each dimension label
            # (mag, dist, lon, lat, eps, TRT)
            for label in result.dim_labels:
                bin_edge_attr = self.DIM_LABEL_TO_BIN_EDGE_MAP.get(label)
                assert self.metadata.get(bin_edge_attr) is not None, (
                    "Writer is missing '%s' metadata" % bin_edge_attr)

            result_type = ','.join(result.dim_labels)
            diss_matrix.set('type', result_type)

            dims = ','.join([str(x) for x in result.matrix.shape])
            diss_matrix.set('dims', dims)

            diss_matrix.set('poE', str(result.poe))
            diss_matrix.set('iml', str(result.iml))

            for idxs, value in numpy.ndenumerate(result.matrix):
                prob = etree.SubElement(diss_matrix, 'prob')

                index = ','.join([str(x) for x in idxs])
                prob.set('index', index)
                prob.set('value', str(value))

        fh.write(
            etree.tostring(root, pretty_print=True, xml_declaration=True,
                           encoding='UTF-8'))
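
# Hypothetical usage sketch (assumption, not library code): each result
# bundles a numpy histogram with its dimension labels, the target poe and
# the interpolated iml. The mock class, file path and metadata keywords are
# illustrative only; the real writer also requires matching bin-edge
# metadata for every dimension label.
from collections import namedtuple
import numpy

_DisaggResult = namedtuple('_DisaggResult', 'matrix dim_labels poe iml')

result = _DisaggResult(matrix=numpy.zeros((3, 4, 5)),
                       dim_labels=['Mag', 'Dist', 'Eps'],
                       poe=0.1, iml=0.25)
# writer = DisaggXMLWriter('/tmp/disagg.xml', **metadata)
# writer.serialize([result])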
def serialize(self, data):
    """
    Serialize a collection of stochastic event sets to XML.

    :param data:
        An iterable of "SES" ("Stochastic Event Set") objects.
        Each "SES" object should:

        * have an `investigation_time` attribute
        * have an `ordinal` attribute
        * be iterable, yielding a sequence of "rupture" objects

        Each "rupture" should have the following attributes:

        * `tag`
        * `magnitude`
        * `strike`
        * `dip`
        * `rake`
        * `tectonic_region_type`
        * `is_from_fault_source` (a `bool`)
        * `is_multi_surface` (a `bool`)
        * `lons`
        * `lats`
        * `depths`

        If `is_from_fault_source` is `True`, the rupture originated from a
        simple or complex fault source. In this case, `lons`, `lats`, and
        `depths` should all be 2D arrays (of uniform shape). These
        coordinate triples represent nodes of the rupture mesh.

        If `is_from_fault_source` is `False`, the rupture originated from a
        point or area source. In this case, the rupture is represented by a
        quadrilateral planar surface. This planar surface is defined by 3D
        vertices, and the rupture should have the following attributes:

        * `top_left_corner`
        * `top_right_corner`
        * `bottom_right_corner`
        * `bottom_left_corner`

        Each of these should be a triple of `lon`, `lat`, `depth`.

        If `is_multi_surface` is `True`, the rupture originated from a
        multi-surface source. In this case, `lons`, `lats`, and `depths`
        should have uniform length. The length should be a multiple of 4,
        where each segment of 4 represents the corner points of a planar
        surface in the following order:

        * top left
        * top right
        * bottom left
        * bottom right

        Each of these should be a triple of `lon`, `lat`, `depth`.
    """
    with NRMLFile(self.dest, 'w') as fh:
        root = etree.Element('nrml',
                             nsmap=openquake.nrmllib.SERIALIZE_NS_MAP)

        ses_container = etree.SubElement(root,
                                         'stochasticEventSetCollection')
        ses_container.set(SM_TREE_PATH, self.sm_lt_path)

        for ses in data:
            ruptures = list(ses)
            if not ruptures:  # empty SES, don't export it
                continue
            ses_elem = etree.SubElement(ses_container,
                                        'stochasticEventSet')
            ses_elem.set('id', str(ses.ordinal or 1))
            ses_elem.set('investigationTime', str(ses.investigation_time))
            for rupture in ruptures:
                rupture_to_element(rupture, ses_elem)

        fh.write(
            etree.tostring(root, pretty_print=True, xml_declaration=True,
                           encoding='UTF-8'))
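
# Hypothetical usage sketch (assumption, not library code): a minimal SES
# containing one planar-surface rupture from a point source. The mock
# classes, attribute values and commented writer call are illustrative
# only and follow the attribute list in the docstring above.
from collections import namedtuple

_Rupture = namedtuple('_Rupture', [
    'tag', 'magnitude', 'strike', 'dip', 'rake', 'tectonic_region_type',
    'is_from_fault_source', 'is_multi_surface', 'lons', 'lats', 'depths',
    'top_left_corner', 'top_right_corner',
    'bottom_right_corner', 'bottom_left_corner'])


class _SES(list):
    # an iterable of ruptures with the two extra attributes the writer needs
    investigation_time = 50.0
    ordinal = 1


rupture = _Rupture(
    tag='rup|1', magnitude=6.5, strike=0.0, dip=90.0, rake=0.0,
    tectonic_region_type='Active Shallow Crust',
    is_from_fault_source=False, is_multi_surface=False,
    lons=None, lats=None, depths=None,
    top_left_corner=(-122.0, 38.1, 5.0),
    top_right_corner=(-121.9, 38.1, 5.0),
    bottom_right_corner=(-121.9, 38.0, 10.0),
    bottom_left_corner=(-122.0, 38.0, 10.0))
ses = _SES([rupture])
# writer = SESXMLWriter('/tmp/ses.xml', sm_lt_path='b1')
# writer.serialize([ses])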