def setUp(self):
    """Prepare output paths and sample loss/loss-ratio curve fixtures."""
    self.loss_curve_path = helpers.get_output_path(LOSS_XML_OUTPUT_FILE)
    self.loss_ratio_curve_path = helpers.get_output_path(
        LOSS_RATIO_XML_OUTPUT_FILE)
    self.single_loss_curve_path = helpers.get_output_path(
        SINGLE_LOSS_XML_OUTPUT_FILE)
    self.single_loss_ratio_curve_path = helpers.get_output_path(
        SINGLE_LOSS_RATIO_XML_OUTPUT_FILE)
    self.schema_path = NRML_SCHEMA_PATH

    # Build up some sample loss/loss ratio curves here.
    site_a = shapes.Site(-117.0, 38.0)
    site_b = shapes.Site(-118.0, 39.0)
    asset_a = models.ExposureData(asset_ref='a1711')
    asset_b = models.ExposureData(asset_ref='a1712')

    self.loss_curves = [
        (site_a, (TEST_LOSS_CURVE, asset_a)),
        (site_b, (TEST_LOSS_CURVE, asset_b))]
    self.loss_ratio_curves = [
        (site_a, (TEST_LOSS_RATIO_CURVE, asset_a)),
        (site_b, (TEST_LOSS_RATIO_CURVE, asset_b))]
    self.single_loss_curve = [(site_a, (TEST_LOSS_CURVE, asset_a))]
    self.single_loss_ratio_curve = [
        (site_a, (TEST_LOSS_RATIO_CURVE, asset_a))]

    # Loss curve that must fail: the same asset appears at two
    # inconsistent sites.
    self.loss_curves_fail = [
        (site_a, (TEST_LOSS_CURVE, asset_a)),
        (site_b, (TEST_LOSS_CURVE, asset_a))]
def test_loss_serialization_with_inconsistent_site_fails(self):
    """Assert that serialization of illegal loss curve data raises error."""
    writer = risk_output.LossCurveXMLWriter(
        helpers.get_output_path(LOSS_XML_FAIL_OUTPUT_FILE))
    # One asset mapped to two different sites must be rejected.
    self.assertRaises(
        ValueError, writer.serialize, self.loss_curves_fail)
def _test_hazard_map_relative_scaling(region, hm_data): path = helpers.get_output_path( 'TEST_HAZARD_MAP_relative_scaling.tiff') # expected colors for each pixel in the map: exp_red_vals = numpy.array([[49, 186, 255, 138, 186, 24], [0, 208, 67, 0, 255, 24], [143, 255, 123, 186, 143, 0], [186, 0, 255, 186, 238, 143], [255, 186, 0, 186, 12, 205], [255, 143, 208, 238, 97, 0]]) exp_green_vals = numpy.array([[190, 197, 255, 236, 197, 175], [39, 216, 202, 39, 255, 175], [161, 255, 235, 197, 161, 39], [197, 39, 255, 197, 79, 161], [255, 197, 39, 197, 129, 255], [255, 161, 216, 79, 122, 39]]) exp_blue_vals = numpy.array([[255, 247, 255, 174, 247, 255], [224, 251, 255, 224, 255, 255], [241, 255, 200, 247, 241, 224], [247, 224, 255, 247, 77, 241], [255, 247, 224, 247, 248, 162], [255, 241, 251, 77, 236, 224]]) hm_writer = geotiff.HazardMapGeoTiffFile(path, small_region.grid, html_wrapper=True) hm_writer.serialize(hm_data) self._assert_image_rgb_is_correct(path, exp_red_vals, exp_green_vals, exp_blue_vals)
def test_simple_curve_plot_generation(self):
    """Create an SVG plot of a single (hazard) curve for a single
    site from a dictionary."""
    site = shapes.Site(-122, 38)
    end_branch = '1_1'
    hc_data = {end_branch: {
        'abscissa': [0.0, 1.0, 1.8],
        'ordinate': [1.0, 0.5, 0.2],
        'abscissa_property': 'PGA',
        'ordinate_property': 'Probability of Exceedance',
        'curve_title': 'Hazard Curve',
        'Site': site}}

    output_path = helpers.get_output_path(
        HAZARDCURVE_PLOT_SIMPLE_FILENAME)
    plot = curve.CurvePlot(output_path)
    plot.write(hc_data)
    plot.close()

    # The plot file must exist and be non-empty.
    self.assertTrue(os.path.getsize(output_path) > 0)
    os.remove(output_path)
def test_writes_the_config_only_once(self):
    """Shared curve configuration must be written only once even when
    two curves carry identical config metadata."""
    data = [(shapes.Site(-122.5000, 37.5000),
             {"IDmodel": "MMI_3_1", "investigationTimeSpan": 50.0,
              "endBranchLabel": "3_1", "IMLValues": [5.0, 6.0, 7.0],
              "saPeriod": 0.1, "saDamping": 1.0, "IMT": "PGA",
              "PoEValues": [0.1, 0.2, 0.3]}),
            (shapes.Site(-122.4000, 37.5000),
             {"IDmodel": "MMI_3_1", "investigationTimeSpan": 50.0,
              "endBranchLabel": "3_2", "IMLValues": [5.0, 6.0, 7.0],
              "saPeriod": 0.1, "saDamping": 1.0, "IMT": "PGA",
              "PoEValues": [0.4, 0.5, 0.6]})]

    path = helpers.get_output_path(TEST_FILE_CONFIG_ONCE)
    self._initialize_writer(path)
    self.writer.serialize(data)
    self._is_xml_valid(path)
def _test_hazard_map_relative_scaling(region, hm_data): path = helpers.get_output_path( 'TEST_HAZARD_MAP_relative_scaling.tiff') # expected colors for each pixel in the map: exp_red_vals = numpy.array([ [49, 186, 255, 138, 186, 24], [0, 208, 67, 0, 255, 24], [143, 255, 123, 186, 143, 0], [186, 0, 255, 186, 238, 143], [255, 186, 0, 186, 12, 205], [255, 143, 208, 238, 97, 0]]) exp_green_vals = numpy.array([ [190, 197, 255, 236, 197, 175], [39, 216, 202, 39, 255, 175], [161, 255, 235, 197, 161, 39], [197, 39, 255, 197, 79, 161], [255, 197, 39, 197, 129, 255], [255, 161, 216, 79, 122, 39]]) exp_blue_vals = numpy.array([ [255, 247, 255, 174, 247, 255], [224, 251, 255, 224, 255, 255], [241, 255, 200, 247, 241, 224], [247, 224, 255, 247, 77, 241], [255, 247, 224, 247, 248, 162], [255, 241, 251, 77, 236, 224]]) hm_writer = geotiff.HazardMapGeoTiffFile( path, small_region.grid, html_wrapper=True) hm_writer.serialize(hm_data) self._assert_image_rgb_is_correct( path, exp_red_vals, exp_green_vals, exp_blue_vals)
def _read_curves(self, upper_left_cor, lower_right_cor, test_file):
    """Parse curves from ``test_file``, keeping only those inside the
    rectangle spanned by the two corner coordinates."""
    region_filter = shapes.RegionConstraint.from_simple(
        upper_left_cor, lower_right_cor)
    parser = hazard_parser.NrmlFile(helpers.get_output_path(test_file))
    return parser.filter(region_filter)
def test_writes_the_config_only_once(self):
    """Two curves sharing identical config metadata must produce a
    schema-valid file with the config written a single time."""
    first = (shapes.Site(-122.5000, 37.5000),
             {"IDmodel": "MMI_3_1", "investigationTimeSpan": 50.0,
              "endBranchLabel": "3_1", "IMLValues": [5.0, 6.0, 7.0],
              "saPeriod": 0.1, "saDamping": 1.0, "IMT": "PGA",
              "PoEValues": [0.1, 0.2, 0.3]})
    second = (shapes.Site(-122.4000, 37.5000),
              {"IDmodel": "MMI_3_1", "investigationTimeSpan": 50.0,
               "endBranchLabel": "3_2", "IMLValues": [5.0, 6.0, 7.0],
               "saPeriod": 0.1, "saDamping": 1.0, "IMT": "PGA",
               "PoEValues": [0.4, 0.5, 0.6]})
    data = [first, second]

    path = helpers.get_output_path(TEST_FILE_CONFIG_ONCE)
    self._initialize_writer(path)
    self.writer.serialize(data)
    self.assertTrue(xml.validates_against_xml_schema(path))
def test_writes_a_single_result(self):
    """A single hazard curve serializes to schema-valid NRML containing
    the expected metadata, and round-trips through the reader."""
    imls = [5.0000e-03, 7.0000e-03, 1.3700e-02, 1.9200e-02,
            2.6900e-02, 3.7600e-02, 5.2700e-02, 7.3800e-02,
            9.8000e-02, 1.0300e-01, 1.4500e-01, 2.0300e-01,
            2.8400e-01, 3.9700e-01, 5.5600e-01, 7.7800e-01,
            1.0900e+00, 1.5200e+00, 2.1300e+00]
    poes = [9.8728e-01, 9.8266e-01, 9.4957e-01, 9.0326e-01,
            8.1956e-01, 6.9192e-01, 5.2866e-01, 3.6143e-01,
            2.4231e-01, 2.2452e-01, 1.2831e-01, 7.0352e-02,
            3.6060e-02, 1.6579e-02, 6.4213e-03, 2.0244e-03,
            4.8605e-04, 8.1752e-05, 7.3425e-06]
    data = [(shapes.Site(-122.5000, 37.5000),
             {"IDmodel": "MMI_3_1", "investigationTimeSpan": 50.0,
              "endBranchLabel": "3_1", "IMLValues": imls,
              "saPeriod": 0.1, "saDamping": 1.0, "IMT": "PGA",
              "PoEValues": poes})]

    path = helpers.get_output_path(TEST_FILE_SINGLE_RESULT)
    self._initialize_writer(path)
    self.writer.serialize(data)

    self.assertTrue(xml.validates_against_xml_schema(path))
    self.assertTrue(XML_METADATA in self._result_as_string(path))

    self.read_curves = self._read_curves(
        (-123.0, 38.0), (-122.0, 35.0), TEST_FILE_SINGLE_RESULT)
    self._assert_number_of_curves_is(1)
    self._assert_curves_are(data)
def _read_curves(self, upper_left_cor, lower_right_cor, test_file):
    """Return curves parsed from ``test_file`` that fall within the
    region bounded by the given upper-left/lower-right corners."""
    bounds = shapes.RegionConstraint.from_simple(
        upper_left_cor, lower_right_cor)
    nrml_reader = hazard_parser.NrmlFile(
        helpers.get_output_path(test_file))
    return nrml_reader.filter(bounds)
def test_geotiff_generation_and_metadata_validation(self):
    """Create a GeoTIFF, and check if it has the correct metadata."""
    region = shapes.Region.from_coordinates(TEST_REGION_SMALL)
    tiff_path = helpers.get_output_path(GEOTIFF_FILENAME_WITHOUT_NUMBER)
    writer = geotiff.LossMapGeoTiffFile(
        tiff_path, region.grid, normalize=False)
    writer.close()
    self._assert_geotiff_metadata_is_correct(tiff_path, region)
def setUp(self):
    """Prepare output paths and loss/loss-ratio fixtures where each
    asset appears under two end-branch labels."""
    self.loss_curve_path = helpers.get_output_path(LOSS_XML_OUTPUT_FILE)
    self.loss_ratio_curve_path = helpers.get_output_path(
        LOSS_RATIO_XML_OUTPUT_FILE)
    self.single_loss_curve_path = helpers.get_output_path(
        SINGLE_LOSS_XML_OUTPUT_FILE)
    self.single_loss_ratio_curve_path = helpers.get_output_path(
        SINGLE_LOSS_RATIO_XML_OUTPUT_FILE)
    self.schema_path = NRML_SCHEMA_PATH

    # Sample sites and assets; each asset has an A and a B branch.
    site_1 = shapes.Site(-117.0, 38.0)
    site_2 = shapes.Site(-118.0, 39.0)
    asset_1a = {"assetID": "a1711", "endBranchLabel": "A"}
    asset_1b = {"assetID": "a1711", "endBranchLabel": "B"}
    asset_2a = {"assetID": "a1712", "endBranchLabel": "A"}
    asset_2b = {"assetID": "a1712", "endBranchLabel": "B"}

    self.loss_curves = [
        (site_1, (TEST_LOSS_CURVE, asset_1a)),
        (site_1, (TEST_LOSS_CURVE, asset_1b)),
        (site_2, (TEST_LOSS_CURVE, asset_2a)),
        (site_2, (TEST_LOSS_CURVE, asset_2b))]
    self.loss_ratio_curves = [
        (site_1, (TEST_LOSS_RATIO_CURVE, asset_1a)),
        (site_1, (TEST_LOSS_RATIO_CURVE, asset_1b)),
        (site_2, (TEST_LOSS_RATIO_CURVE, asset_2a)),
        (site_2, (TEST_LOSS_RATIO_CURVE, asset_2b))]
    self.single_loss_curve = [(site_1, (TEST_LOSS_CURVE, asset_1a))]
    self.single_loss_ratio_curve = [
        (site_1, (TEST_LOSS_RATIO_CURVE, asset_1a))]

    # Loss curve that must fail: one asset at two inconsistent sites.
    self.loss_curves_fail = [
        (site_1, (TEST_LOSS_CURVE, asset_1a)),
        (site_2, (TEST_LOSS_CURVE, asset_1a))]
def setUp(self):
    """Prepare output paths and loss/loss-ratio fixtures (assets keyed
    by assetID plus end-branch label)."""
    self.loss_curve_path = helpers.get_output_path(LOSS_XML_OUTPUT_FILE)
    self.loss_ratio_curve_path = helpers.get_output_path(
        LOSS_RATIO_XML_OUTPUT_FILE)
    self.single_loss_curve_path = helpers.get_output_path(
        SINGLE_LOSS_XML_OUTPUT_FILE)
    self.single_loss_ratio_curve_path = helpers.get_output_path(
        SINGLE_LOSS_RATIO_XML_OUTPUT_FILE)
    self.schema_path = NRML_SCHEMA_PATH

    # Build up some sample loss/loss ratio curves here.
    first_site = shapes.Site(-117.0, 38.0)
    second_site = shapes.Site(-118.0, 39.0)
    # Four assets: two asset IDs, each under end-branch labels A and B.
    first_asset_a, first_asset_b, second_asset_a, second_asset_b = [
        {"assetID": asset_id, "endBranchLabel": label}
        for asset_id in ("a1711", "a1712")
        for label in ("A", "B")]

    self.loss_curves = [
        (first_site, (TEST_LOSS_CURVE, first_asset_a)),
        (first_site, (TEST_LOSS_CURVE, first_asset_b)),
        (second_site, (TEST_LOSS_CURVE, second_asset_a)),
        (second_site, (TEST_LOSS_CURVE, second_asset_b))]
    self.loss_ratio_curves = [
        (first_site, (TEST_LOSS_RATIO_CURVE, first_asset_a)),
        (first_site, (TEST_LOSS_RATIO_CURVE, first_asset_b)),
        (second_site, (TEST_LOSS_RATIO_CURVE, second_asset_a)),
        (second_site, (TEST_LOSS_RATIO_CURVE, second_asset_b))]
    self.single_loss_curve = [
        (first_site, (TEST_LOSS_CURVE, first_asset_a))]
    self.single_loss_ratio_curve = [
        (first_site, (TEST_LOSS_RATIO_CURVE, first_asset_a))]

    # Loss curve that fails: same asset attached to two sites.
    self.loss_curves_fail = [
        (first_site, (TEST_LOSS_CURVE, first_asset_a)),
        (second_site, (TEST_LOSS_CURVE, first_asset_a))]
def test_writes_multiple_results_with_statistics(self):
    """Quantile-statistics curves for two sites serialize to a valid
    file and can be read back intact."""
    imls = [5.0000e-03, 7.0000e-03, 1.3700e-02, 1.9200e-02,
            2.6900e-02, 3.7600e-02, 5.2700e-02, 7.3800e-02,
            9.8000e-02, 1.0300e-01, 1.4500e-01, 2.0300e-01,
            2.8400e-01, 3.9700e-01, 5.5600e-01, 7.7800e-01,
            1.0900e+00, 1.5200e+00, 2.1300e+00]
    poes_first = [9.8728e-01, 9.8266e-01, 9.4957e-01, 9.0326e-01,
                  8.1956e-01, 6.9192e-01, 5.2866e-01, 3.6143e-01,
                  2.4231e-01, 2.2452e-01, 1.2831e-01, 7.0352e-02,
                  3.6060e-02, 1.6579e-02, 6.4213e-03, 2.0244e-03,
                  4.8605e-04, 8.1752e-05, 7.3425e-06]
    poes_second = [9.8784e-01, 9.8405e-01, 9.5719e-01, 9.1955e-01,
                   8.5019e-01, 7.4038e-01, 5.9153e-01, 4.2626e-01,
                   2.9755e-01, 2.7731e-01, 1.6218e-01, 8.8035e-02,
                   4.3499e-02, 1.9065e-02, 7.0442e-03, 2.1300e-03,
                   4.9498e-04, 8.1768e-05, 7.3425e-06]
    data = [(shapes.Site(-122.5000, 37.5000),
             {"nrml_id": "nrml_instance_1",
              "hazres_id": "hazard_result_0001",
              "hcfield_id": "hazard_field_one",
              "hcnode_id": "the_hazard_node_1000",
              "IDmodel": "foo", "investigationTimeSpan": 50.0,
              "statistics": "quantile", "quantileValue": "0.5",
              "IMLValues": imls, "saPeriod": 0.1, "saDamping": 1.0,
              "IMT": "PGA", "PoEValues": poes_first}),
            (shapes.Site(-122.4000, 37.5000),
             {"IDmodel": "foo", "investigationTimeSpan": 50.0,
              "statistics": "quantile", "quantileValue": "0.5",
              "IMLValues": imls, "saPeriod": 0.1, "saDamping": 1.0,
              "IMT": "PGA", "PoEValues": poes_second})]

    path = helpers.get_output_path(TEST_FILE_STATISTICS)
    self._initialize_writer(path)
    self.writer.serialize(data)
    self._is_xml_valid(path)

    self.readed_curves = self._read_curves(
        (-123.0, 38.0), (-120.0, 35.0), TEST_FILE_STATISTICS)
    self._assert_number_of_curves_is(2)
    self._assert_curves_are(data)
def test_serializes_gmf(self):
    """A serialized ground-motion field round-trips through the reader."""
    path = helpers.get_output_path(GMF_NORUPTURE_TEST_FILE)
    hazard_output.GMFXMLWriter(path).serialize(GMF_NORUPTURE_TEST_DATA)

    # The reader yields (site, attributes) pairs; collect them back
    # into a dict and compare to the original data.
    round_tripped = dict(hazard_parser.GMFReader(path))
    self.assertEqual(round_tripped, GMF_NORUPTURE_TEST_DATA)
def test_lossmap_geotiff_generation_with_number_in_filename(self):
    """Create a GeoTIFF with a number in its filename.

    Regression test: filenames containing numbers were reported to
    fail."""
    region = shapes.Region.from_coordinates(TEST_REGION_SMALL)
    tiff_path = helpers.get_output_path(GEOTIFF_FILENAME_WITH_NUMBER)
    writer = geotiff.LossMapGeoTiffFile(
        tiff_path, region.grid, normalize=False)
    writer.close()
    self._assert_geotiff_metadata_is_correct(tiff_path, region)
def test_geotiff_generation_and_metadata_validation(self):
    """Create a GeoTIFF, and check if it has the correct metadata."""
    output_path = helpers.get_output_path(
        GEOTIFF_FILENAME_WITHOUT_NUMBER)
    small = shapes.Region.from_coordinates(TEST_REGION_SMALL)
    tiff_writer = geotiff.LossMapGeoTiffFile(
        output_path, small.grid, normalize=False)
    tiff_writer.close()
    self._assert_geotiff_metadata_is_correct(output_path, small)
def test_lossmap_geotiff_generation_with_number_in_filename(self):
    """Create a GeoTIFF with a number in its filename. This test has
    been written because it has been reported that numbers in the
    filename do not work."""
    output_path = helpers.get_output_path(GEOTIFF_FILENAME_WITH_NUMBER)
    small = shapes.Region.from_coordinates(TEST_REGION_SMALL)
    tiff_writer = geotiff.LossMapGeoTiffFile(
        output_path, small.grid, normalize=False)
    tiff_writer.close()
    self._assert_geotiff_metadata_is_correct(output_path, small)
def test_geotiff_generation_colorscale(self):
    """Check RGB geotiff generation with colorscale for GMF."""
    path = helpers.get_output_path(GEOTIFF_FILENAME_COLORSCALE)
    asymmetric_region = shapes.Region.from_coordinates(
        TEST_REGION_LARGE_ASYMMETRIC)
    gwriter = geotiff.GMFGeoTiffFile(path, asymmetric_region.grid,
                                     discrete=False)
    # numpy.float was a deprecated alias for the builtin float
    # (removed in NumPy 1.24); numpy.float64 is the equivalent dtype.
    reference_raster = numpy.zeros((asymmetric_region.grid.rows,
                                    asymmetric_region.grid.columns),
                                   dtype=numpy.float64)
    self._fill_rasters(asymmetric_region, gwriter, reference_raster,
                       self._colorscale_fill)
    gwriter.close()
def setUp(self):
    """Set up output paths plus sample loss and loss-ratio curves."""
    self.loss_curve_path = helpers.get_output_path(LOSS_XML_OUTPUT_FILE)
    self.loss_ratio_curve_path = helpers.get_output_path(
        LOSS_RATIO_XML_OUTPUT_FILE)
    self.single_loss_curve_path = helpers.get_output_path(
        SINGLE_LOSS_XML_OUTPUT_FILE)
    self.single_loss_ratio_curve_path = helpers.get_output_path(
        SINGLE_LOSS_RATIO_XML_OUTPUT_FILE)
    self.schema_path = NRML_SCHEMA_PATH

    # Build up some sample loss/loss ratio curves here.
    first_site = shapes.Site(-117.0, 38.0)
    second_site = shapes.Site(-118.0, 39.0)
    first_asset = models.ExposureData(asset_ref='a1711')
    second_asset = models.ExposureData(asset_ref='a1712')

    self.loss_curves = [
        (first_site, (TEST_LOSS_CURVE, first_asset)),
        (second_site, (TEST_LOSS_CURVE, second_asset))]
    self.loss_ratio_curves = [
        (first_site, (TEST_LOSS_RATIO_CURVE, first_asset)),
        (second_site, (TEST_LOSS_RATIO_CURVE, second_asset))]
    self.single_loss_curve = [
        (first_site, (TEST_LOSS_CURVE, first_asset))]
    self.single_loss_ratio_curve = [
        (first_site, (TEST_LOSS_RATIO_CURVE, first_asset))]

    # A curve set that must fail serialization: the same asset is
    # associated with two different sites.
    self.loss_curves_fail = [
        (first_site, (TEST_LOSS_CURVE, first_asset)),
        (second_site, (TEST_LOSS_CURVE, first_asset))]
def test_geotiff_generation_colorscale(self):
    """Check RGB geotiff generation with colorscale for GMF."""
    path = helpers.get_output_path(GEOTIFF_FILENAME_COLORSCALE)
    asymmetric_region = shapes.Region.from_coordinates(
        TEST_REGION_LARGE_ASYMMETRIC)
    gwriter = geotiff.GMFGeoTiffFile(path, asymmetric_region.grid,
                                     discrete=False)
    # numpy.float was removed in NumPy 1.24 (it simply aliased the
    # builtin float); numpy.float64 is behaviorally identical.
    reference_raster = numpy.zeros(
        (asymmetric_region.grid.rows,
         asymmetric_region.grid.columns),
        dtype=numpy.float64)
    self._fill_rasters(asymmetric_region, gwriter, reference_raster,
                       self._colorscale_fill)
    gwriter.close()
def test_writes_multiple_results_with_one_branch_level(self):
    """Two curves sharing a single end-branch label serialize to a
    valid file and can be read back."""
    imls = [5.0000e-03, 7.0000e-03, 1.3700e-02, 1.9200e-02,
            2.6900e-02, 3.7600e-02, 5.2700e-02, 7.3800e-02,
            9.8000e-02, 1.0300e-01, 1.4500e-01, 2.0300e-01,
            2.8400e-01, 3.9700e-01, 5.5600e-01, 7.7800e-01,
            1.0900e+00, 1.5200e+00, 2.1300e+00]
    poes_first = [9.8728e-01, 9.8266e-01, 9.4957e-01, 9.0326e-01,
                  8.1956e-01, 6.9192e-01, 5.2866e-01, 3.6143e-01,
                  2.4231e-01, 2.2452e-01, 1.2831e-01, 7.0352e-02,
                  3.6060e-02, 1.6579e-02, 6.4213e-03, 2.0244e-03,
                  4.8605e-04, 8.1752e-05, 7.3425e-06]
    poes_second = [9.8784e-01, 9.8405e-01, 9.5719e-01, 9.1955e-01,
                   8.5019e-01, 7.4038e-01, 5.9153e-01, 4.2626e-01,
                   2.9755e-01, 2.7731e-01, 1.6218e-01, 8.8035e-02,
                   4.3499e-02, 1.9065e-02, 7.0442e-03, 2.1300e-03,
                   4.9498e-04, 8.1768e-05, 7.3425e-06]
    data = [(shapes.Site(-122.5000, 37.5000),
             {"IDmodel": "MMI_3_1", "investigationTimeSpan": 50.0,
              "endBranchLabel": "3_1", "IMLValues": imls,
              "saPeriod": 0.1, "saDamping": 1.0, "IMT": "PGA",
              "PoEValues": poes_first}),
            (shapes.Site(-122.4000, 37.5000),
             {"IDmodel": "MMI_3_1", "investigationTimeSpan": 50.0,
              "endBranchLabel": "3_1", "IMLValues": imls,
              "saPeriod": 0.1, "saDamping": 1.0, "IMT": "PGA",
              "PoEValues": poes_second})]

    path = helpers.get_output_path(TEST_FILE_MULTIPLE_ONE_BRANCH)
    self._initialize_writer(path)
    self.writer.serialize(data)
    self.assertTrue(xml.validates_against_xml_schema(path))

    self.read_curves = self._read_curves(
        (-123.0, 38.0), (-120.0, 35.0), TEST_FILE_MULTIPLE_ONE_BRANCH)
    self._assert_number_of_curves_is(2)
    self._assert_curves_are(data)
def test_geotiff_generation_discrete_colorscale(self):
    """Check RGB geotiff generation with colorscale for GMF. Use
    discrete colorscale based on IML values, with default IML."""
    path = helpers.get_output_path(GEOTIFF_FILENAME_DISCRETE_COLORSCALE)
    asymmetric_region = shapes.Region.from_coordinates(
        TEST_REGION_LARGE_ASYMMETRIC)
    gwriter = geotiff.GMFGeoTiffFile(
        path, asymmetric_region.grid, iml_list=None, discrete=True,
        colormap=geotiff.COLORMAPS['gmt-seis'])
    # numpy.float was a deprecated alias for the builtin float
    # (removed in NumPy 1.24); numpy.float64 is the equivalent dtype.
    reference_raster = numpy.zeros((asymmetric_region.grid.rows,
                                    asymmetric_region.grid.columns),
                                   dtype=numpy.float64)
    self._fill_rasters(asymmetric_region, gwriter, reference_raster,
                       self._colorscale_cuts_fill)
    gwriter.close()
def test_geotiff_generation_nondefault_colorscale(self):
    """Check RGB geotiff generation with colorscale for GMF. Use
    alternative colorscale."""
    path = helpers.get_output_path(GEOTIFF_FILENAME_NONDEFAULT_COLORSCALE)
    asymmetric_region = shapes.Region.from_coordinates(
        TEST_REGION_LARGE_ASYMMETRIC)
    gwriter = geotiff.GMFGeoTiffFile(
        path, asymmetric_region.grid, iml_list=None, discrete=False,
        colormap=geotiff.COLORMAPS['gmt-green-red'])
    # numpy.float was a deprecated alias for the builtin float
    # (removed in NumPy 1.24); numpy.float64 is the equivalent dtype.
    reference_raster = numpy.zeros((asymmetric_region.grid.rows,
                                    asymmetric_region.grid.columns),
                                   dtype=numpy.float64)
    self._fill_rasters(asymmetric_region, gwriter, reference_raster,
                       self._colorscale_cuts_fill)
    gwriter.close()
def test_writes_multiple_results_with_different_branch_levels(self):
    """Curves carrying different end-branch labels all serialize and
    can be read back."""
    data = [(shapes.Site(-122.5000, 37.5000),
             {"IDmodel": "MMI_3_1", "investigationTimeSpan": 50.0,
              "endBranchLabel": "3_1", "IMLValues": [5.0, 6.0, 7.0],
              "saPeriod": 0.1, "saDamping": 1.0, "IMT": "PGA",
              "PoEValues": [0.1, 0.2, 0.3]}),
            (shapes.Site(-122.5000, 37.5000),
             {"IDmodel": "MMI_3_1", "investigationTimeSpan": 50.0,
              "endBranchLabel": "3_2", "IMLValues": [5.0, 6.0, 7.0],
              "saPeriod": 0.1, "saDamping": 1.0, "IMT": "PGA",
              "PoEValues": [0.1, 0.2, 0.3]}),
            (shapes.Site(-122.4000, 37.5000),
             {"IDmodel": "MMI_3_1", "investigationTimeSpan": 50.0,
              "endBranchLabel": "3_2", "IMLValues": [8.0, 9.0, 10.0],
              "saPeriod": 0.1, "saDamping": 1.0, "IMT": "PGA",
              "PoEValues": [0.1, 0.2, 0.3]})]

    path = helpers.get_output_path(TEST_FILE_MULTIPLE_DIFFERENT_BRANCHES)
    self._initialize_writer(path)
    self.writer.serialize(data)
    self._is_xml_valid(path)

    self.readed_curves = self._read_curves(
        (-123.0, 38.0), (-120.0, 35.0),
        TEST_FILE_MULTIPLE_DIFFERENT_BRANCHES)
    self._assert_number_of_curves_is(3)
    self._assert_curves_are(data)
def test_geotiff_generation_and_simple_raster_validation(self):
    """Create a GeoTIFF and assign values to the raster nodes according
    to a simple function. Then check if the raster values have been
    set correctly."""
    path = helpers.get_output_path(GEOTIFF_FILENAME_SQUARE_REGION)
    squareregion = shapes.Region.from_coordinates(TEST_REGION_SQUARE)
    gwriter = geotiff.LossMapGeoTiffFile(
        path, squareregion.grid, normalize=False)
    # numpy.float was a deprecated alias for the builtin float
    # (removed in NumPy 1.24); numpy.float64 is the equivalent dtype.
    reference_raster = numpy.zeros((squareregion.grid.rows,
                                    squareregion.grid.columns),
                                   dtype=numpy.float64)
    self._fill_rasters(squareregion, gwriter, reference_raster,
                       self._trivial_fill)
    gwriter.close()
    self._assert_geotiff_metadata_and_raster_is_correct(
        path, squareregion, GEOTIFF_USED_CHANNEL_IDX, reference_raster)
def test_geotiff_generation_and_simple_raster_validation(self):
    """Create a GeoTIFF and assign values to the raster nodes according
    to a simple function. Then check if the raster values have been
    set correctly."""
    path = helpers.get_output_path(GEOTIFF_FILENAME_SQUARE_REGION)
    squareregion = shapes.Region.from_coordinates(TEST_REGION_SQUARE)
    gwriter = geotiff.LossMapGeoTiffFile(path, squareregion.grid,
                                         normalize=False)
    # numpy.float was removed in NumPy 1.24 (alias of the builtin
    # float); numpy.float64 is behaviorally identical.
    reference_raster = numpy.zeros(
        (squareregion.grid.rows, squareregion.grid.columns),
        dtype=numpy.float64)
    self._fill_rasters(squareregion, gwriter, reference_raster,
                       self._trivial_fill)
    gwriter.close()
    self._assert_geotiff_metadata_and_raster_is_correct(
        path, squareregion, GEOTIFF_USED_CHANNEL_IDX, reference_raster)
def test_geotiff_loss_ratio_map_colorscale(self):
    """Check the colorscale of a byte-typed loss-ratio map GeoTIFF:
    band values must span the expected 0..240 range."""
    path = helpers.get_output_path(GEOTIFF_LOSS_RATIO_MAP_COLORSCALE)
    asymmetric_region = shapes.Region.from_coordinates(
        TEST_REGION_LARGE_ASYMMETRIC)
    gwriter = geotiff.LossMapGeoTiffFile(
        path, asymmetric_region.grid, pixel_type=gdal.GDT_Byte)
    # numpy.float was a deprecated alias for the builtin float
    # (removed in NumPy 1.24); numpy.float64 is the equivalent dtype.
    reference_raster = numpy.zeros((asymmetric_region.grid.rows,
                                    asymmetric_region.grid.columns),
                                   dtype=numpy.float64)

    # Deterministic ramp over the grid (PEP 8 E731: def, not a
    # lambda assignment).
    def color_fill(x, y):
        return (x * y) / 50

    self._fill_rasters(asymmetric_region, gwriter, reference_raster,
                       color_fill)
    gwriter.close()
    self._assert_geotiff_metadata_is_correct(path, asymmetric_region)
    self._assert_geotiff_band_min_max_values(
        path, GEOTIFF_USED_CHANNEL_IDX, 0, 240)
def test_writes_multiple_results_with_different_branch_levels(self):
    """Curves with different end-branch labels serialize to a
    schema-valid file and round-trip through the reader."""
    site_specs = [
        (shapes.Site(-122.5000, 37.5000), "3_1", [5.0, 6.0, 7.0]),
        (shapes.Site(-122.5000, 37.5000), "3_2", [5.0, 6.0, 7.0]),
        (shapes.Site(-122.4000, 37.5000), "3_2", [8.0, 9.0, 10.0])]
    data = [(site,
             {"IDmodel": "MMI_3_1", "investigationTimeSpan": 50.0,
              "endBranchLabel": branch, "IMLValues": imls,
              "saPeriod": 0.1, "saDamping": 1.0, "IMT": "PGA",
              "PoEValues": [0.1, 0.2, 0.3]})
            for site, branch, imls in site_specs]

    path = helpers.get_output_path(
        TEST_FILE_MULTIPLE_DIFFERENT_BRANCHES)
    self._initialize_writer(path)
    self.writer.serialize(data)
    self.assertTrue(xml.validates_against_xml_schema(path))

    self.read_curves = self._read_curves(
        (-123.0, 38.0), (-120.0, 35.0),
        TEST_FILE_MULTIPLE_DIFFERENT_BRANCHES)
    self._assert_number_of_curves_is(3)
    self._assert_curves_are(data)
def test_lossmap_geotiff_generation_initialize_raster(self):
    """Create a GeoTIFF and initialize the raster to a given value.
    Then check through metadata if it has been done correctly. We
    check the minumum and maximum values of the band, which are
    expected to have the value of the raster nodes."""
    region = shapes.Region.from_coordinates(TEST_REGION_SMALL)
    tiff_path = helpers.get_output_path(GEOTIFF_FILENAME_WITH_NUMBER)
    writer = geotiff.LossMapGeoTiffFile(
        tiff_path, region.grid, init_value=GEOTIFF_TEST_PIXEL_VALUE,
        pixel_type=gdal.GDT_Byte, normalize=False)
    writer.close()

    self._assert_geotiff_metadata_is_correct(tiff_path, region)
    # Every raster pixel carries the init value, so band min == max.
    self._assert_geotiff_band_min_max_values(
        tiff_path, GEOTIFF_USED_CHANNEL_IDX,
        GEOTIFF_TEST_PIXEL_VALUE, GEOTIFF_TEST_PIXEL_VALUE)
def test_geotiff_generation_discrete_colorscale(self):
    """Check RGB geotiff generation with colorscale for GMF. Use
    discrete colorscale based on IML values, with default IML."""
    path = helpers.get_output_path(GEOTIFF_FILENAME_DISCRETE_COLORSCALE)
    asymmetric_region = shapes.Region.from_coordinates(
        TEST_REGION_LARGE_ASYMMETRIC)
    gwriter = geotiff.GMFGeoTiffFile(
        path, asymmetric_region.grid, iml_list=None, discrete=True,
        colormap=geotiff.COLORMAPS['gmt-seis'])
    # numpy.float was removed in NumPy 1.24 (alias of builtin float);
    # numpy.float64 is behaviorally identical.
    reference_raster = numpy.zeros(
        (asymmetric_region.grid.rows,
         asymmetric_region.grid.columns),
        dtype=numpy.float64)
    self._fill_rasters(asymmetric_region, gwriter, reference_raster,
                       self._colorscale_cuts_fill)
    gwriter.close()
def test_loss_curve_plot_generation_multiple_sites_render_multi(self):
    """Create SVG plots for loss curves read from an NRML file. The
    file contains data for several sites. For each site, a separate
    SVG file is created."""
    svg_path = helpers.get_output_path(LOSS_CURVE_PLOT_FILENAME)
    nrml_path = helpers.get_data_path(LOSS_CURVE_PLOT_INPUTFILE)
    plotter = curve.RiskCurvePlotter(
        svg_path, nrml_path, mode='loss',
        curve_title="This is a test loss curve", render_multi=True)

    # Remove stale output from previous runs, if any.
    for stale in plotter.filenames():
        if os.path.isfile(stale):
            os.remove(stale)

    plotter.plot(autoscale_y=True)

    # Each per-site SVG must exist and be non-empty.
    for svg_file in plotter.filenames():
        self.assertTrue(os.path.getsize(svg_file) > 0)
def test_geotiff_generation_nondefault_colorscale(self):
    """Check RGB geotiff generation with colorscale for GMF. Use
    alternative colorscale."""
    path = helpers.get_output_path(GEOTIFF_FILENAME_NONDEFAULT_COLORSCALE)
    asymmetric_region = shapes.Region.from_coordinates(
        TEST_REGION_LARGE_ASYMMETRIC)
    gwriter = geotiff.GMFGeoTiffFile(
        path, asymmetric_region.grid, iml_list=None, discrete=False,
        colormap=geotiff.COLORMAPS['gmt-green-red'])
    # numpy.float was removed in NumPy 1.24 (alias of builtin float);
    # numpy.float64 is behaviorally identical.
    reference_raster = numpy.zeros(
        (asymmetric_region.grid.rows,
         asymmetric_region.grid.columns),
        dtype=numpy.float64)
    self._fill_rasters(asymmetric_region, gwriter, reference_raster,
                       self._colorscale_cuts_fill)
    gwriter.close()
def test_writes_a_single_result(self):
    """A single hazard curve serializes to a valid file with the
    expected metadata and can be read back."""
    imls = [5.0000e-03, 7.0000e-03, 1.3700e-02, 1.9200e-02,
            2.6900e-02, 3.7600e-02, 5.2700e-02, 7.3800e-02,
            9.8000e-02, 1.0300e-01, 1.4500e-01, 2.0300e-01,
            2.8400e-01, 3.9700e-01, 5.5600e-01, 7.7800e-01,
            1.0900e+00, 1.5200e+00, 2.1300e+00]
    poes = [9.8728e-01, 9.8266e-01, 9.4957e-01, 9.0326e-01,
            8.1956e-01, 6.9192e-01, 5.2866e-01, 3.6143e-01,
            2.4231e-01, 2.2452e-01, 1.2831e-01, 7.0352e-02,
            3.6060e-02, 1.6579e-02, 6.4213e-03, 2.0244e-03,
            4.8605e-04, 8.1752e-05, 7.3425e-06]
    data = [(shapes.Site(-122.5000, 37.5000),
             {"IDmodel": "MMI_3_1", "investigationTimeSpan": 50.0,
              "endBranchLabel": "3_1", "IMLValues": imls,
              "saPeriod": 0.1, "saDamping": 1.0, "IMT": "PGA",
              "PoEValues": poes})]

    path = helpers.get_output_path(TEST_FILE_SINGLE_RESULT)
    self._initialize_writer(path)
    self.writer.serialize(data)
    self._is_xml_valid(path)
    self.assertTrue(XML_METADATA in self._result_as_string(path))

    self.readed_curves = self._read_curves(
        (-123.0, 38.0), (-122.0, 35.0), TEST_FILE_SINGLE_RESULT)
    self._assert_number_of_curves_is(1)
    self._assert_curves_are(data)
def test_geotiff_generation_asymmetric_pattern(self):
    """Create a GeoTIFF and assign values to the raster nodes according
    to a simple function. Use a somewhat larger, non-square region for
    that. Then check if the raster values have been set correctly."""
    path = helpers.get_output_path(
        GEOTIFF_FILENAME_LARGE_ASYMMETRIC_REGION)
    asymmetric_region = shapes.Region.from_coordinates(
        TEST_REGION_LARGE_ASYMMETRIC)
    gwriter = geotiff.LossMapGeoTiffFile(
        path, asymmetric_region.grid, pixel_type=gdal.GDT_Float32,
        normalize=False)
    # numpy.float was a deprecated alias for the builtin float
    # (removed in NumPy 1.24); numpy.float64 is the equivalent dtype.
    reference_raster = numpy.zeros((asymmetric_region.grid.rows,
                                    asymmetric_region.grid.columns),
                                   dtype=numpy.float64)
    self._fill_rasters(asymmetric_region, gwriter, reference_raster,
                       self._trivial_fill)
    gwriter.close()
    self._assert_geotiff_metadata_and_raster_is_correct(
        path, asymmetric_region, GEOTIFF_USED_CHANNEL_IDX,
        reference_raster)
def test_geotiff_loss_ratio_map_colorscale(self):
    """Check the colorscale of a byte-typed loss-ratio map GeoTIFF;
    the band must span the expected 0..240 value range."""
    path = helpers.get_output_path(GEOTIFF_LOSS_RATIO_MAP_COLORSCALE)
    asymmetric_region = shapes.Region.from_coordinates(
        TEST_REGION_LARGE_ASYMMETRIC)
    gwriter = geotiff.LossMapGeoTiffFile(path, asymmetric_region.grid,
                                         pixel_type=gdal.GDT_Byte)
    # numpy.float was removed in NumPy 1.24 (alias of builtin float);
    # numpy.float64 is behaviorally identical.
    reference_raster = numpy.zeros(
        (asymmetric_region.grid.rows,
         asymmetric_region.grid.columns),
        dtype=numpy.float64)

    # Deterministic ramp over the grid (PEP 8 E731: prefer def over
    # assigning a lambda to a name).
    def color_fill(x, y):
        return (x * y) / 50

    self._fill_rasters(asymmetric_region, gwriter, reference_raster,
                       color_fill)
    gwriter.close()
    self._assert_geotiff_metadata_is_correct(path, asymmetric_region)
    self._assert_geotiff_band_min_max_values(
        path, GEOTIFF_USED_CHANNEL_IDX, 0, 240)
def test_geotiff_generation_explicit_colorscale_bins(self):
    """Check RGB geotiff generation with colorscale for GMF. Limits
    and bins of colorscale are explicitly given."""
    path = helpers.get_output_path(
        GEOTIFF_FILENAME_EXPLICIT_COLORSCALE_BINS)
    asymmetric_region = shapes.Region.from_coordinates(
        TEST_REGION_LARGE_ASYMMETRIC)

    # Run once with explicit IML bins and once with the default (None).
    for test_number, test_list in enumerate(([0.9, 0.95, 1.0, 1.05],
                                             None)):
        curr_path = "%s.%s.tiff" % (path[0:-5], test_number)
        gwriter = geotiff.GMFGeoTiffFile(
            curr_path, asymmetric_region.grid, iml_list=test_list,
            discrete=False)
        # numpy.float was a deprecated alias for the builtin float
        # (removed in NumPy 1.24); numpy.float64 is equivalent.
        reference_raster = numpy.zeros((asymmetric_region.grid.rows,
                                        asymmetric_region.grid.columns),
                                       dtype=numpy.float64)
        self._fill_rasters(asymmetric_region, gwriter, reference_raster,
                           self._colorscale_cuts_fill)
        gwriter.close()
def test_simple_curve_plot_generation(self):
    """Create an SVG plot of a single (hazard) curve for a single
    site from a dictionary."""
    plot_site = shapes.Site(-122, 38)
    curve_data = {
        '1_1': {'abscissa': [0.0, 1.0, 1.8],
                'ordinate': [1.0, 0.5, 0.2],
                'abscissa_property': 'PGA',
                'ordinate_property': 'Probability of Exceedance',
                'curve_title': 'Hazard Curve',
                'Site': plot_site}}

    svg_path = helpers.get_output_path(HAZARDCURVE_PLOT_SIMPLE_FILENAME)
    plotter = curve.CurvePlot(svg_path)
    plotter.write(curve_data)
    plotter.close()

    # assert that file has been created and is not empty
    self.assertTrue(os.path.getsize(svg_path) > 0)
    os.remove(svg_path)
def test_loss_ratio_curve_plot_generation_multiple_sites(self):
    """Create SVG plots for loss ratio curves read from an NRML file.
    The file contains data for several sites. For each site, a
    separate SVG file is created."""
    svg_path = helpers.get_output_path(LOSS_RATIO_CURVE_PLOT_FILENAME)
    input_path = helpers.get_data_path(LOSS_RATIO_CURVE_PLOT_INPUTFILE)
    plotter = curve.RiskCurvePlotter(svg_path, input_path,
                                     mode='loss_ratio')

    # Delete expected output files, if existing.
    for stale in plotter.filenames():
        if os.path.isfile(stale):
            os.remove(stale)

    plotter.plot(autoscale_y=False)

    # Assert that a non-empty SVG was created for each site.
    for svg_file in plotter.filenames():
        self.assertTrue(os.path.getsize(svg_file) > 0)
def _test_hazard_map_fixed_scaling(region, hm_data): path = helpers.get_output_path( 'TEST_HAZARD_MAP_fixed_scaling.tiff') # expected colors for each pixel in the map: exp_red_vals = numpy.array([ [238, 255, 247, 238, 255, 238], [238, 255, 238, 238, 247, 238], [244, 247, 238, 255, 244, 238], [255, 238, 247, 244, 238, 244], [247, 255, 238, 255, 238, 238], [247, 244, 255, 238, 238, 238]]) exp_green_vals = numpy.array([ [79, 160, 215, 79, 160, 79], [79, 160, 79, 79, 215, 79], [116, 215, 79, 160, 116, 79], [160, 79, 215, 116, 79, 116], [215, 160, 79, 160, 79, 79], [215, 116, 189, 79, 79, 79]]) exp_blue_vals = numpy.array([ [77, 68, 103, 77, 68, 77], [77, 68, 77, 77, 103, 77], [74, 103, 77, 68, 74, 77], [68, 77, 103, 74, 77, 74], [103, 68, 77, 68, 77, 77], [103, 74, 86, 77, 77, 77]]) iml_min = 0.0 iml_max = 0.3 hm_writer = geotiff.HazardMapGeoTiffFile( path, small_region.grid, html_wrapper=True, iml_min_max=(iml_min, iml_max)) hm_writer.serialize(hm_data) self._assert_image_rgb_is_correct( path, exp_red_vals, exp_green_vals, exp_blue_vals)
def test_geotiff_generation_explicit_colorscale_bins(self):
    """Check RGB geotiff generation with colorscale for GMF. Limits
    and bins of colorscale are explicitly given."""
    base_path = helpers.get_output_path(
        GEOTIFF_FILENAME_EXPLICIT_COLORSCALE_BINS)

    region = shapes.Region.from_coordinates(TEST_REGION_LARGE_ASYMMETRIC)

    # Run once with explicit IML bins, once letting the writer choose.
    iml_variants = ([0.9, 0.95, 1.0, 1.05], None)
    for variant_index, iml_list in enumerate(iml_variants):
        variant_path = "%s.%s.tiff" % (base_path[0:-5], variant_index)
        writer = geotiff.GMFGeoTiffFile(variant_path, region.grid,
                                        iml_list=iml_list, discrete=False)
        reference_raster = numpy.zeros(
            (region.grid.rows, region.grid.columns),
            dtype=numpy.float)
        self._fill_rasters(region, writer, reference_raster,
                           self._colorscale_cuts_fill)
        writer.close()
def test_hazardcurve_plot_generation_multiple_sites_multiple_curves(self):
    """Render SVG plots for hazard curves from an NRML file that holds
    several sites and several logic-tree end branches; a separate SVG
    file is produced per site."""
    output_path = helpers.get_output_path(HAZARDCURVE_PLOT_FILENAME)
    input_path = helpers.get_data_path(HAZARDCURVE_PLOT_INPUTFILE)

    plotter = curve.HazardCurvePlotter(
        output_path, input_path, curve_title='Example Hazard Curves')

    # Remove stale output from any previous run.
    for stale in plotter.filenames():
        if os.path.isfile(stale):
            os.remove(stale)

    plotter.plot()

    # Each site in the NRML input must now have a non-empty SVG;
    # clean up as we verify.
    for svg in plotter.filenames():
        self.assertTrue(os.path.getsize(svg) > 0)
        os.remove(svg)
def test_geotiff_generation_asymmetric_pattern(self):
    """Write a GeoTIFF over a larger, non-square region, filling the
    raster nodes through a simple function, then verify the stored
    metadata and raster values."""
    output_path = helpers.get_output_path(
        GEOTIFF_FILENAME_LARGE_ASYMMETRIC_REGION)
    region = shapes.Region.from_coordinates(TEST_REGION_LARGE_ASYMMETRIC)

    writer = geotiff.LossMapGeoTiffFile(output_path, region.grid,
                                        pixel_type=gdal.GDT_Float32,
                                        normalize=False)

    # Reference raster filled in lockstep with the writer.
    expected_raster = numpy.zeros(
        (region.grid.rows, region.grid.columns), dtype=numpy.float)
    self._fill_rasters(region, writer, expected_raster,
                       self._trivial_fill)
    writer.close()

    self._assert_geotiff_metadata_and_raster_is_correct(
        output_path, region, GEOTIFF_USED_CHANNEL_IDX, expected_raster)
def _test_hazard_map_fixed_scaling(region, hm_data): path = helpers.get_output_path( 'TEST_HAZARD_MAP_fixed_scaling.tiff') # expected colors for each pixel in the map: exp_red_vals = numpy.array([[238, 255, 247, 238, 255, 238], [238, 255, 238, 238, 247, 238], [244, 247, 238, 255, 244, 238], [255, 238, 247, 244, 238, 244], [247, 255, 238, 255, 238, 238], [247, 244, 255, 238, 238, 238]]) exp_green_vals = numpy.array([[79, 160, 215, 79, 160, 79], [79, 160, 79, 79, 215, 79], [116, 215, 79, 160, 116, 79], [160, 79, 215, 116, 79, 116], [215, 160, 79, 160, 79, 79], [215, 116, 189, 79, 79, 79]]) exp_blue_vals = numpy.array([[77, 68, 103, 77, 68, 77], [77, 68, 77, 77, 103, 77], [74, 103, 77, 68, 74, 77], [68, 77, 103, 74, 77, 74], [103, 68, 77, 68, 77, 77], [103, 74, 86, 77, 77, 77]]) iml_min = 0.0 iml_max = 0.3 hm_writer = geotiff.HazardMapGeoTiffFile(path, small_region.grid, html_wrapper=True, iml_min_max=(iml_min, iml_max)) hm_writer.serialize(hm_data) self._assert_image_rgb_is_correct(path, exp_red_vals, exp_green_vals, exp_blue_vals)
def test_raises_an_error_if_no_curve_is_serialized(self):
    """Closing a writer that never serialized any curve must raise
    RuntimeError."""
    output_path = helpers.get_output_path(TEST_FILE)
    self._initialize_writer(output_path)
    self.assertRaises(RuntimeError, self.writer.close)
import os
import unittest

from lxml import etree

from openquake import shapes
from openquake import xml
from openquake.nrml.utils import nrml_schema_file
from tests.utils import helpers
from openquake.db.models import ExposureData
from openquake.output import risk as risk_output

# Scratch paths for the loss maps serialized by the tests in this module.
TEST_LOSS_MAP_XML_OUTPUT_PATH = helpers.get_output_path('test-loss-map.xml')
TEST_NON_SCN_LOSS_MAP_XML_OUTPUT_PATH = helpers.get_output_path(
    'test-non-det-loss-map.xml')

# Reference fixtures the serialized output is compared against.
EXPECTED_TEST_LOSS_MAP = helpers.get_data_path('expected-test-loss-map.xml')
EXPECTED_TEST_NON_SCN_LOSS_MAP = helpers.get_data_path(
    'expected-non-det-test-loss-map.xml')

# Path of the NRML schema used to validate the generated XML.
NRML_SCHEMA_PATH = nrml_schema_file()

# Common metadata passed to the loss-map writers under test.
LOSS_MAP_METADATA = {
    'nrmlID': 'test_nrml_id',
    'riskResultID': 'test_rr_id',
    'lossMapID': 'test_lm_id',
    'endBranchLabel': 'test_ebl',
    'lossCategory': 'economic_loss',
    'unit': 'EUR'}
def test_writes_multiple_results_with_one_branch_level(self):
    """Serialize two hazard curves that belong to the same logic-tree
    end branch ("3_1") and check both are written and read back."""
    # Both sites share the same intensity measure levels.
    imls = [
        5.0000e-03, 7.0000e-03, 1.3700e-02, 1.9200e-02,
        2.6900e-02, 3.7600e-02, 5.2700e-02, 7.3800e-02,
        9.8000e-02, 1.0300e-01, 1.4500e-01, 2.0300e-01,
        2.8400e-01, 3.9700e-01, 5.5600e-01, 7.7800e-01,
        1.0900e+00, 1.5200e+00, 2.1300e+00,
    ]

    first_curve = {
        "IDmodel": "MMI_3_1",
        "investigationTimeSpan": 50.0,
        "endBranchLabel": "3_1",
        "IMLValues": list(imls),
        "saPeriod": 0.1,
        "saDamping": 1.0,
        "IMT": "PGA",
        "PoEValues": [
            9.8728e-01, 9.8266e-01, 9.4957e-01, 9.0326e-01,
            8.1956e-01, 6.9192e-01, 5.2866e-01, 3.6143e-01,
            2.4231e-01, 2.2452e-01, 1.2831e-01, 7.0352e-02,
            3.6060e-02, 1.6579e-02, 6.4213e-03, 2.0244e-03,
            4.8605e-04, 8.1752e-05, 7.3425e-06,
        ],
    }

    second_curve = {
        "IDmodel": "MMI_3_1",
        "investigationTimeSpan": 50.0,
        "endBranchLabel": "3_1",
        "IMLValues": list(imls),
        "saPeriod": 0.1,
        "saDamping": 1.0,
        "IMT": "PGA",
        "PoEValues": [
            9.8784e-01, 9.8405e-01, 9.5719e-01, 9.1955e-01,
            8.5019e-01, 7.4038e-01, 5.9153e-01, 4.2626e-01,
            2.9755e-01, 2.7731e-01, 1.6218e-01, 8.8035e-02,
            4.3499e-02, 1.9065e-02, 7.0442e-03, 2.1300e-03,
            4.9498e-04, 8.1768e-05, 7.3425e-06,
        ],
    }

    data = [
        (shapes.Site(-122.5000, 37.5000), first_curve),
        (shapes.Site(-122.4000, 37.5000), second_curve),
    ]

    path = helpers.get_output_path(TEST_FILE_MULTIPLE_ONE_BRANCH)
    self._initialize_writer(path)
    self.writer.serialize(data)

    self._is_xml_valid(path)
    self.readed_curves = self._read_curves(
        (-123.0, 38.0), (-120.0, 35.0), TEST_FILE_MULTIPLE_ONE_BRANCH)

    self._assert_number_of_curves_is(2)
    self._assert_curves_are(data)
def test_writes_multiple_results_with_statistics(self):
    """Serialize two quantile (0.5) hazard curves and check both are
    written and read back."""
    # Both sites share the same intensity measure levels.
    imls = [
        5.0000e-03, 7.0000e-03, 1.3700e-02, 1.9200e-02,
        2.6900e-02, 3.7600e-02, 5.2700e-02, 7.3800e-02,
        9.8000e-02, 1.0300e-01, 1.4500e-01, 2.0300e-01,
        2.8400e-01, 3.9700e-01, 5.5600e-01, 7.7800e-01,
        1.0900e+00, 1.5200e+00, 2.1300e+00,
    ]

    # The first curve additionally carries explicit container ids.
    first_curve = {
        "nrml_id": "nrml_instance_1",
        "hazres_id": "hazard_result_0001",
        "hcfield_id": "hazard_field_one",
        "hcnode_id": "the_hazard_node_1000",
        "IDmodel": "foo",
        "investigationTimeSpan": 50.0,
        "statistics": "quantile",
        "quantileValue": "0.5",
        "IMLValues": list(imls),
        "saPeriod": 0.1,
        "saDamping": 1.0,
        "IMT": "PGA",
        "PoEValues": [
            9.8728e-01, 9.8266e-01, 9.4957e-01, 9.0326e-01,
            8.1956e-01, 6.9192e-01, 5.2866e-01, 3.6143e-01,
            2.4231e-01, 2.2452e-01, 1.2831e-01, 7.0352e-02,
            3.6060e-02, 1.6579e-02, 6.4213e-03, 2.0244e-03,
            4.8605e-04, 8.1752e-05, 7.3425e-06,
        ],
    }

    second_curve = {
        "IDmodel": "foo",
        "investigationTimeSpan": 50.0,
        "statistics": "quantile",
        "quantileValue": "0.5",
        "IMLValues": list(imls),
        "saPeriod": 0.1,
        "saDamping": 1.0,
        "IMT": "PGA",
        "PoEValues": [
            9.8784e-01, 9.8405e-01, 9.5719e-01, 9.1955e-01,
            8.5019e-01, 7.4038e-01, 5.9153e-01, 4.2626e-01,
            2.9755e-01, 2.7731e-01, 1.6218e-01, 8.8035e-02,
            4.3499e-02, 1.9065e-02, 7.0442e-03, 2.1300e-03,
            4.9498e-04, 8.1768e-05, 7.3425e-06,
        ],
    }

    data = [
        (shapes.Site(-122.5000, 37.5000), first_curve),
        (shapes.Site(-122.4000, 37.5000), second_curve),
    ]

    path = helpers.get_output_path(TEST_FILE_STATISTICS)
    self._initialize_writer(path)
    self.writer.serialize(data)

    self._is_xml_valid(path)
    self.readed_curves = self._read_curves(
        (-123.0, 38.0), (-120.0, 35.0), TEST_FILE_STATISTICS)

    self._assert_number_of_curves_is(2)
    self._assert_curves_are(data)
import os
import unittest

from lxml import etree

from openquake import logs
from openquake import shapes
from openquake import xml
from tests.utils import helpers
from openquake.output import risk as risk_output

# Risk-specific logger shared by the tests in this module.
LOG = logs.RISK_LOG

# Scratch path for the loss map serialized by the tests.
TEST_LOSS_MAP_XML_OUTPUT_PATH = helpers.get_output_path('test-loss-map.xml')
# Reference fixture the serialized output is compared against.
EXPECTED_TEST_LOSS_MAP = helpers.get_data_path('expected-test-loss-map.xml')

# Path of the NRML schema used to validate the generated XML.
NRML_SCHEMA_PATH = os.path.join(helpers.SCHEMA_DIR, xml.NRML_SCHEMA_FILE)

# Common metadata passed to the loss-map writers under test.
# NOTE(review): a nearly identical module header appears elsewhere in this
# file -- the file looks like a concatenation of test-module revisions.
LOSS_MAP_METADATA = {
    'nrmlID': 'test_nrml_id',
    'riskResultID': 'test_rr_id',
    'lossMapID': 'test_lm_id',
    'endBranchLabel': 'test_ebl',
    'lossCategory': 'economic_loss',
    'unit': 'EUR'
}

# Sample site/asset pair used across the loss-map tests.
SITE_A = shapes.Site(-117.0, 38.0)
SITE_A_ASSET_ONE = {'assetID': 'a1711'}
import os
import unittest

from lxml import etree

from openquake import logs
from openquake import shapes
from openquake import xml
from tests.utils import helpers
from openquake.output import risk as risk_output

# Risk-specific logger shared by the tests in this module.
LOG = logs.RISK_LOG

# Scratch path for the loss map serialized by the tests.
TEST_LOSS_MAP_XML_OUTPUT_PATH = helpers.get_output_path('test-loss-map.xml')
# Reference fixture the serialized output is compared against.
EXPECTED_TEST_LOSS_MAP = helpers.get_data_path('expected-test-loss-map.xml')

# Path of the NRML schema used to validate the generated XML.
NRML_SCHEMA_PATH = os.path.join(helpers.SCHEMA_DIR, xml.NRML_SCHEMA_FILE)

# Common metadata passed to the loss-map writers under test.
# NOTE(review): a nearly identical module header appears elsewhere in this
# file -- the file looks like a concatenation of test-module revisions.
LOSS_MAP_METADATA = {
    'nrmlID': 'test_nrml_id',
    'riskResultID': 'test_rr_id',
    'lossMapID': 'test_lm_id',
    'endBranchLabel': 'test_ebl',
    'lossCategory': 'economic_loss',
    'unit': 'EUR'}

# Sample site/asset pair used across the loss-map tests.
SITE_A = shapes.Site(-117.0, 38.0)
SITE_A_ASSET_ONE = {'assetID': 'a1711'}