def setUp(self):
    """Build the event-based JobContext used by the tests.

    Imports the 'simplecase' smoke-test profile and wires it into a
    fresh job record.
    """
    config_path = helpers.testdata_path("simplecase/config.gem")
    data_dir = helpers.testdata_path("simplecase")

    job = engine.prepare_job()
    profile, job_params, job_sections = engine.import_job_profile(
        config_path, job)

    self.eb_job = JobContext(
        job_params,
        job.id,
        sections=job_sections,
        base_path=data_dir,
        oq_job_profile=profile,
        oq_job=job,
    )
def setUp(self):
    """Prepare a JobContext for the event-based 'simplecase' job."""
    # Resolve both the config file and its containing directory first.
    cfg = helpers.testdata_path("simplecase/config.gem")
    base = helpers.testdata_path("simplecase")

    job = engine.prepare_job()
    profile, params, sections = engine.import_job_profile(cfg, job)

    self.eb_job = JobContext(
        params, job.id,
        sections=sections,
        base_path=base,
        oq_job_profile=profile,
        oq_job=job)
def setUp(self):
    """Build a scenario JobContext, constrain it to a single, fast GMF
    calculation serialized to XML, and store it in the KVS."""
    # Start from a clean KVS so keys from earlier tests cannot leak in.
    kvs.get_client().flushall()

    base_path = helpers.testdata_path("scenario")
    job = engine.prepare_job()
    self.job_profile, self.params, self.sections = (
        engine.import_job_profile(SCENARIO_SMOKE_TEST, job))
    self.job_ctxt = JobContext(self.params, job.id, sections=self.sections,
                               base_path=base_path,
                               oq_job_profile=self.job_profile, oq_job=job)
    # Keep the test fast: one ground-motion field only, XML output only.
    self.job_ctxt.params[NUMBER_OF_CALC_KEY] = "1"
    self.job_ctxt.params['SERIALIZE_RESULTS_TO'] = 'xml'
    self.job_ctxt.serialize_results_to = ["xml"]

    # saving the default java implementation (so tests that stub
    # compute_ground_motion_field can restore it afterwards)
    self.default = (
        scenario.ScenarioHazardCalculator.compute_ground_motion_field)

    self.grid = self.job_ctxt.region.grid
    self.job_ctxt.to_kvs()
def setUp(self):
    """Import the event-based 'simplecase' job profile for the tests."""
    config_path = helpers.testdata_path("simplecase/config.gem")

    self.job = engine.prepare_job()
    self.jp, self.params, self.sections = engine.import_job_profile(
        config_path, self.job)
def setUp(self):
    """Build a scenario CalculationProxy, constrain it to a single GMF
    calculation serialized to XML, and store it in the KVS."""
    # Start from a clean KVS so keys from earlier tests cannot leak in.
    kvs.get_client().flushall()

    base_path = helpers.testdata_path("scenario")
    self.job_profile, self.params, self.sections = (
        engine.import_job_profile(SCENARIO_SMOKE_TEST))
    calculation = OqCalculation(owner=self.job_profile.owner,
                                oq_job_profile=self.job_profile)
    calculation.save()
    self.calc_proxy = CalculationProxy(
        self.params, calculation.id, sections=self.sections,
        base_path=base_path, oq_job_profile=self.job_profile,
        oq_calculation=calculation)
    # Keep the test fast: one ground-motion field only, XML output only.
    self.calc_proxy.params[NUMBER_OF_CALC_KEY] = "1"
    self.calc_proxy.params['SERIALIZE_RESULTS_TO'] = 'xml'

    # saving the default java implementation (so tests that stub
    # compute_ground_motion_field can restore it afterwards)
    self.default = (
        scenario.ScenarioHazardCalculator.compute_ground_motion_field)

    self.grid = self.calc_proxy.region.grid
    self.calc_proxy.to_kvs()
def setUp(self):
    """Create a CalculationProxy for the event-based 'simplecase' job."""
    config_path = helpers.testdata_path("simplecase/config.gem")
    data_dir = helpers.testdata_path("simplecase")

    profile, params, sections = engine.import_job_profile(config_path)

    # The proxy needs a persisted calculation record to reference.
    calculation = OqCalculation(
        owner=profile.owner, description="", oq_job_profile=profile)
    calculation.save()

    self.eb_job = CalculationProxy(
        params,
        calculation.id,
        sections=sections,
        base_path=data_dir,
        oq_job_profile=profile,
        oq_calculation=calculation,
    )
def test_read_sites_from_exposure(self):
    """
    Test reading site data from an exposure file using
    :py:func:`openquake.engine.read_sites_from_exposure`.
    """
    job_config_file = helpers.testdata_path("simplecase/config.gem")

    test_job = helpers.job_from_file(job_config_file)

    expected_sites = set([
        shapes.Site(-118.077721, 33.852034),
        shapes.Site(-118.067592, 33.855398),
        shapes.Site(-118.186739, 33.779013),
    ])

    # Compare as sets: the exposure parser makes no ordering guarantee,
    # so an order-sensitive list comparison would make the test fragile.
    actual_sites = set(engine.read_sites_from_exposure(test_job))
    self.assertEqual(expected_sites, actual_sites)
def test_read_sites_from_exposure(self):
    """Sites read back from an exposure file match the expected set
    (via ``read_sites_from_exposure``)."""
    test_job = helpers.job_from_file(
        helpers.testdata_path('simplecase/config.gem'))

    # The exposure assets must be stored before sites can be read back.
    calc = core.EventBasedRiskCalculator(test_job)
    calc.store_exposure_assets()

    expected_sites = set([
        shapes.Site(-118.077721, 33.852034),
        shapes.Site(-118.067592, 33.855398),
        shapes.Site(-118.186739, 33.779013)])

    self.assertEqual(
        expected_sites, set(read_sites_from_exposure(test_job)))
def test_read_sites_from_exposure(self):
    """Sites parsed by ``engine.read_sites_from_exposure`` match the
    expected set."""
    job_cfg = helpers.testdata_path('simplecase/config.gem')
    test_job = helpers.job_from_file(job_cfg)

    # Sites can only be read once the exposure assets are stored.
    calc = core.EventBasedRiskCalculator(test_job)
    calc.store_exposure_assets()

    expected = set([
        shapes.Site(-118.077721, 33.852034),
        shapes.Site(-118.067592, 33.855398),
        shapes.Site(-118.186739, 33.779013)])
    actual = set(engine.read_sites_from_exposure(test_job))

    self.assertEqual(expected, actual)
def setUp(self):
    """Build a scenario JobContext, constrain it to a single, fast GMF
    calculation serialized to XML, and store it in the KVS."""
    # Start from a clean KVS so keys from earlier tests cannot leak in.
    kvs.get_client().flushall()

    base_path = helpers.testdata_path("scenario")
    job = engine.prepare_job()
    self.job_profile, self.params, self.sections = (
        engine.import_job_profile(SCENARIO_SMOKE_TEST, job))
    self.job_ctxt = JobContext(
        self.params, job.id, sections=self.sections, base_path=base_path,
        oq_job_profile=self.job_profile, oq_job=job)
    # Keep the test fast: one ground-motion field only, XML output only.
    self.job_ctxt.params[NUMBER_OF_CALC_KEY] = "1"
    self.job_ctxt.params['SERIALIZE_RESULTS_TO'] = 'xml'

    # saving the default java implementation (so tests that stub
    # compute_ground_motion_field can restore it afterwards)
    self.default = (
        scenario.ScenarioHazardCalculator.compute_ground_motion_field)

    self.grid = self.job_ctxt.region.grid
    self.job_ctxt.to_kvs()
def datapath(test, path):
    """Return the testdata path of *path* inside the *test* directory."""
    return helpers.testdata_path("{0}/{1}".format(test, path))
- hazard curves (with mean and quantile) - hazard maps (only mean and quantile) """ import os import unittest from openquake.engine import kvs from openquake.engine import logs from tests.utils import helpers LOG = logs.LOG TEST_JOB_FILE = helpers.testdata_path('simplecase/config.gem') SIMPLE_FAULT_SRC_MODEL_LT = helpers.demo_file( 'simple_fault_demo_hazard/source_model_logic_tree.xml') SIMPLE_FAULT_GMPE_LT = helpers.demo_file( 'simple_fault_demo_hazard/gmpe_logic_tree.xml') SIMPLE_FAULT_BASE_PATH = os.path.abspath( helpers.demo_file('simple_fault_demo_hazard')) def get_pattern(regexp): """Get all the values whose keys satisfy the given regexp. Return an empty list if there are no keys satisfying the given regxep. """
This module tests the risk side of the scenario event based calculation. """ import json import unittest from openquake import kvs from openquake import shapes from openquake.risk.job import scenario as risk_job_det from tests.utils import helpers from tests.utils.helpers import patch TEST_JOB_ID = "1234" TEST_REGION = shapes.Region.from_simple((0.1, 0.1), (0.2, 0.2)) TEST_JOB_FILE = helpers.testdata_path('scenario/config.gem') class ScenarioRiskTestCase(unittest.TestCase): """ Test case for module-level functions of the scenario risk job code. """ def test_load_gmvs_for_point(self): """ Exercises the function :py:func:`openquake.risk.job.scenario.load_gmvs_for_point`. """ # clear the kvs before running the test kvs.flush()
from openquake.engine import JobContext from openquake.job import params as job_params from openquake.kvs import tokens from openquake.nrml.utils import nrml_schema_file from tests.utils import helpers LOG = logs.LOG MEAN_GROUND_INTENSITY = ( '{"site":"+35.0000 +35.0000", "intensity": 1.9249e+00,' '"site":"+35.0500 +35.0000", "intensity": 1.9623e+00,' '"site":"+35.1000 +35.0000", "intensity": 2.0320e+00,' '"site":"+35.1500 +35.0000", "intensity": 2.0594e+00}') TEST_JOB_FILE = helpers.testdata_path('simplecase/config.gem') NRML_SCHEMA_PATH = nrml_schema_file() SIMPLE_FAULT_SRC_MODEL_LT = helpers.demo_file( 'simple_fault_demo_hazard/source_model_logic_tree.xml') SIMPLE_FAULT_GMPE_LT = helpers.demo_file( 'simple_fault_demo_hazard/gmpe_logic_tree.xml') SIMPLE_FAULT_BASE_PATH = os.path.abspath( helpers.demo_file('simple_fault_demo_hazard')) def get_pattern(regexp): """Get all the values whose keys satisfy the given regexp. Return an empty list if there are no keys satisfying the given regxep.
import math
import unittest

from django.contrib.gis.geos import GEOSGeometry

from tests.utils import helpers
from tests.utils.helpers import patch

from openquake import engine
from openquake import java
from openquake import kvs
from openquake import shapes
from openquake.engine import JobContext
from openquake.calculators.hazard.scenario import core as scenario

# Job configuration used by the scenario smoke tests.
SCENARIO_SMOKE_TEST = helpers.testdata_path("scenario/config.gem")
# Name of the job parameter controlling how many GMFs are computed.
NUMBER_OF_CALC_KEY = "NUMBER_OF_GROUND_MOTION_FIELDS_CALCULATIONS"


def compute_ground_motion_field(self, _random_generator):
    """Stubbed version of the method that computes the ground motion
    field calling java stuff.

    Returns a java ``HashMap`` mapping each computed site to the
    constant intensity 0.5, so tests never have to run the real java
    calculator.
    """
    hashmap = java.jclass("HashMap")()

    for site in self.job_ctxt.sites_to_compute():
        location = java.jclass("Location")(site.latitude, site.longitude)
        # Rebind `site` to the java-side Site wrapping the same location.
        site = java.jclass("Site")(location)
        hashmap.put(site, 0.5)

    return hashmap
def test_generate_hazard_curves_using_classical_psha(self):
    """Run the classical PSHA calculator end to end and verify that
    realization, mean and quantile hazard curves/maps are written both
    to the KVS and as schema-valid NRML files."""

    def verify_realization_haz_curves_stored_to_kvs(the_job, keys):
        """ This just tests to make sure there something in the KVS
        for each key in given list of keys. This does NOT test the
        actual results. """
        # TODO (LB): At some point we need to test the actual
        # results to verify they are correct

        realizations = int(
            the_job.params['NUMBER_OF_LOGIC_TREE_SAMPLES'])

        for realization in xrange(0, realizations):
            for site in the_job.sites_to_compute():
                key = tokens.hazard_curve_poes_key(
                    the_job.job_id, realization, site)
                self.assertTrue(key in keys, "Missing key %s" % key)

    def verify_mean_haz_curves_stored_to_kvs(the_job, keys):
        """ Make sure that the keys and non-empty values for mean
        hazard curves have been written to KVS."""
        if the_job.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == 'true':
            LOG.debug("verifying KVS entries for mean hazard curves")
            for site in the_job.sites_to_compute():
                key = tokens.mean_hazard_curve_key(the_job.job_id, site)
                self.assertTrue(key in keys, "Missing key %s" % key)

    def verify_mean_haz_maps_stored_to_kvs(the_job, calculator, keys):
        """ Make sure that the keys and non-empty values for mean
        hazard maps have been written to KVS."""
        # Maps are only expected when PoEs were supplied AND the mean
        # curve computation is switched on.
        if (the_job.params[hazard_general.POES_PARAM_NAME] != '' and
            the_job.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == \
            'true'):
            LOG.debug("verifying KVS entries for mean hazard maps")
            for poe in calculator.poes_hazard_maps:
                for site in the_job.sites_to_compute():
                    key = tokens.mean_hazard_map_key(
                        the_job.job_id, site, poe)
                    self.assertTrue(key in keys, "Missing key %s" % key)

    def verify_quantile_haz_curves_stored_to_kvs(the_job, calculator,
                                                 keys):
        """ Make sure that the keys and non-empty values for quantile
        hazard curves have been written to KVS."""
        quantiles = calculator.quantile_levels

        LOG.debug("verifying KVS entries for quantile hazard curves, "\
            "%s quantile values" % len(quantiles))

        for quantile in quantiles:
            for site in the_job.sites_to_compute():
                key = tokens.quantile_hazard_curve_key(
                    the_job.job_id, site, quantile)
                self.assertTrue(key in keys, "Missing key %s" % key)

    def verify_quantile_haz_maps_stored_to_kvs(the_job, calculator,
                                               keys):
        """ Make sure that the keys and non-empty values for quantile
        hazard maps have been written to KVS."""
        quantiles = calculator.quantile_levels

        if (the_job.params[hazard_general.POES_PARAM_NAME] != '' and
            len(quantiles) > 0):
            poes = calculator.poes_hazard_maps

            LOG.debug("verifying KVS entries for quantile hazard maps, "\
                "%s quantile values, %s PoEs" % (
                len(quantiles), len(poes)))

            for quantile in quantiles:
                for poe in poes:
                    for site in the_job.sites_to_compute():
                        key = tokens.quantile_hazard_map_key(
                            the_job.job_id, site, poe, quantile)
                        self.assertTrue(
                            key in keys, "Missing key %s" % key)

    def verify_realization_haz_curves_stored_to_nrml(the_job,
                                                     calculator):
        """Tests that a NRML file has been written for each realization,
        and that this file validates against the NRML schema.
        Does NOT test if results in NRML file are correct.
        """
        realizations = int(
            the_job.params['NUMBER_OF_LOGIC_TREE_SAMPLES'])
        for realization in xrange(0, realizations):
            nrml_path = os.path.join(
                "demos/classical_psha_simple/computed_output",
                calculator.hazard_curve_filename(realization))

            LOG.debug("validating NRML file %s" % nrml_path)

            self.assertTrue(xml.validates_against_xml_schema(
                nrml_path, NRML_SCHEMA_PATH),
                "NRML instance file %s does not validate against schema" \
                % nrml_path)

    def verify_mean_haz_curves_stored_to_nrml(the_job, calculator):
        """Tests that a mean hazard curve NRML file has been written,
        and that this file validates against the NRML schema.
        Does NOT test if results in NRML file are correct.
        """
        if the_job.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == 'true':
            nrml_path = os.path.join(
                "demos/classical_psha_simple/computed_output",
                calculator.mean_hazard_curve_filename())

            LOG.debug("validating NRML file %s" % nrml_path)

            self.assertTrue(xml.validates_against_xml_schema(
                nrml_path, NRML_SCHEMA_PATH),
                "NRML instance file %s does not validate against schema" \
                % nrml_path)

    def verify_mean_haz_maps_stored_to_nrml(the_job):
        """Tests that a mean hazard map NRML file has been written,
        and that this file validates against the NRML schema.
        Does NOT test if results in NRML file are correct.
        """
        # NOTE(review): `calculator` is not a parameter here — it is
        # resolved from the enclosing test scope, where it is defined
        # before this helper is called. Confirm this is intentional.
        if (the_job.params[hazard_general.POES_PARAM_NAME] != '' and
            the_job.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == \
            'true'):
            for poe in calculator.poes_hazard_maps:
                nrml_path = os.path.join(
                    "demos/classical_psha_simple/computed_output",
                    calculator.mean_hazard_map_filename(poe))

                LOG.debug("validating NRML file for mean hazard map %s" \
                    % nrml_path)

                self.assertTrue(xml.validates_against_xml_schema(
                    nrml_path, NRML_SCHEMA_PATH),
                    "NRML instance file %s does not validate against "\
                    "schema" % nrml_path)

    def verify_quantile_haz_curves_stored_to_nrml(the_job, calculator):
        """Tests that quantile hazard curve NRML files have been written,
        and that these file validate against the NRML schema.
        Does NOT test if results in NRML files are correct.
        """
        for quantile in calculator.quantile_levels:
            nrml_path = os.path.join(
                "demos/classical_psha_simple/computed_output",
                calculator.quantile_hazard_curve_filename(quantile))

            LOG.debug("validating NRML file for quantile hazard curve: "\
                "%s" % nrml_path)

            self.assertTrue(xml.validates_against_xml_schema(
                nrml_path, NRML_SCHEMA_PATH),
                "NRML instance file %s does not validate against schema" \
                % nrml_path)

    def verify_quantile_haz_maps_stored_to_nrml(the_job, calculator):
        """Tests that quantile hazard map NRML files have been written,
        and that these file validate against the NRML schema.
        Does NOT test if results in NRML files are correct.
        """
        quantiles = calculator.quantile_levels

        if (the_job.params[hazard_general.POES_PARAM_NAME] != '' and
            len(quantiles) > 0):
            for poe in calculator.poes_hazard_maps:
                for quantile in quantiles:
                    nrml_path = os.path.join(
                        "demos/classical_psha_simple/computed_output",
                        calculator.quantile_hazard_map_filename(quantile,
                                                                poe))

                    LOG.debug("validating NRML file for quantile hazard "\
                        "map: %s" % nrml_path)

                    self.assertTrue(xml.validates_against_xml_schema(
                        nrml_path, NRML_SCHEMA_PATH),
                        "NRML instance file %s does not validate against "\
                        "schema" % nrml_path)

    # Set up and run the classical PSHA calculation under test.
    base_path = helpers.testdata_path("classical_psha_simple")
    path = helpers.testdata_path("classical_psha_simple/config.gem")
    job = engine.prepare_job()
    job_profile, params, sections = engine.import_job_profile(path, job)

    the_job = JobContext(
        params, job.id, sections=sections, base_path=base_path,
        serialize_results_to=['db', 'xml'], oq_job_profile=job_profile,
        oq_job=job)
    the_job.to_kvs()

    calc_mode = job_profile.calc_mode
    calculator = CALCULATORS[calc_mode](the_job)

    used_keys = []
    calculator.execute(used_keys)

    verify_realization_haz_curves_stored_to_kvs(the_job, used_keys)
    verify_realization_haz_curves_stored_to_nrml(the_job, calculator)

    # hazard curves: check results of mean and quantile computation
    verify_mean_haz_curves_stored_to_kvs(the_job, used_keys)
    verify_quantile_haz_curves_stored_to_kvs(the_job, calculator,
                                             used_keys)

    verify_mean_haz_curves_stored_to_nrml(the_job, calculator)
    verify_quantile_haz_curves_stored_to_nrml(the_job, calculator)

    # hazard maps: check results of mean and quantile computation
    verify_mean_haz_maps_stored_to_kvs(the_job, calculator, used_keys)
    verify_quantile_haz_maps_stored_to_kvs(the_job, calculator,
                                           used_keys)

    verify_mean_haz_maps_stored_to_nrml(the_job)
    verify_quantile_haz_maps_stored_to_nrml(the_job, calculator)
def test_generate_hazard_curves_using_classical_psha(self):
    """Run the classical PSHA hazard engine end to end and verify that
    realization, mean and quantile hazard curves/maps are written both
    to the KVS and as schema-valid NRML files."""

    def verify_realization_haz_curves_stored_to_kvs(hazengine, keys):
        """ This just tests to make sure there something in the KVS
        for each key in given list of keys. This does NOT test the
        actual results. """
        # TODO (LB): At some point we need to test the actual
        # results to verify they are correct

        realizations = int(
            hazengine.params['NUMBER_OF_LOGIC_TREE_SAMPLES'])

        for realization in xrange(0, realizations):
            for site in hazengine.sites_to_compute():
                key = tokens.hazard_curve_poes_key(
                    hazengine.job_id, realization, site)
                self.assertTrue(key in keys, "Missing key %s" % key)

    def verify_mean_haz_curves_stored_to_kvs(hazengine, keys):
        """ Make sure that the keys and non-empty values for mean
        hazard curves have been written to KVS."""
        if hazengine.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == \
            'true':
            LOG.debug("verifying KVS entries for mean hazard curves")
            for site in hazengine.sites_to_compute():
                key = tokens.mean_hazard_curve_key(hazengine.job_id, site)
                self.assertTrue(key in keys, "Missing key %s" % key)

    def verify_mean_haz_maps_stored_to_kvs(hazengine, keys):
        """ Make sure that the keys and non-empty values for mean
        hazard maps have been written to KVS."""
        # Maps are only expected when PoEs were supplied AND the mean
        # curve computation is switched on.
        if (hazengine.params[classical_psha.POES_PARAM_NAME] != '' and
            hazengine.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == \
            'true'):
            LOG.debug("verifying KVS entries for mean hazard maps")
            for poe in hazengine.poes_hazard_maps:
                for site in hazengine.sites_to_compute():
                    key = tokens.mean_hazard_map_key(
                        hazengine.job_id, site, poe)
                    self.assertTrue(key in keys, "Missing key %s" % key)

    def verify_quantile_haz_curves_stored_to_kvs(hazengine, keys):
        """ Make sure that the keys and non-empty values for quantile
        hazard curves have been written to KVS."""
        quantiles = hazengine.quantile_levels

        LOG.debug("verifying KVS entries for quantile hazard curves, "\
            "%s quantile values" % len(quantiles))

        for quantile in quantiles:
            for site in hazengine.sites_to_compute():
                key = tokens.quantile_hazard_curve_key(
                    hazengine.job_id, site, quantile)
                self.assertTrue(key in keys, "Missing key %s" % key)

    def verify_quantile_haz_maps_stored_to_kvs(hazengine, keys):
        """ Make sure that the keys and non-empty values for quantile
        hazard maps have been written to KVS."""
        quantiles = hazengine.quantile_levels

        if (hazengine.params[classical_psha.POES_PARAM_NAME] != '' and
            len(quantiles) > 0):
            poes = hazengine.poes_hazard_maps

            LOG.debug("verifying KVS entries for quantile hazard maps, "\
                "%s quantile values, %s PoEs" % (
                len(quantiles), len(poes)))

            for quantile in quantiles:
                for poe in poes:
                    for site in hazengine.sites_to_compute():
                        key = tokens.quantile_hazard_map_key(
                            hazengine.job_id, site, poe, quantile)
                        self.assertTrue(
                            key in keys, "Missing key %s" % key)

    def verify_realization_haz_curves_stored_to_nrml(hazengine):
        """Tests that a NRML file has been written for each realization,
        and that this file validates against the NRML schema.
        Does NOT test if results in NRML file are correct.
        """
        realizations = int(
            hazengine.params['NUMBER_OF_LOGIC_TREE_SAMPLES'])
        for realization in xrange(0, realizations):
            nrml_path = os.path.join(
                "demos/classical_psha_simple/computed_output",
                hazengine.hazard_curve_filename(realization))

            LOG.debug("validating NRML file %s" % nrml_path)

            self.assertTrue(xml.validates_against_xml_schema(
                nrml_path, NRML_SCHEMA_PATH),
                "NRML instance file %s does not validate against schema" \
                % nrml_path)

    def verify_mean_haz_curves_stored_to_nrml(hazengine):
        """Tests that a mean hazard curve NRML file has been written,
        and that this file validates against the NRML schema.
        Does NOT test if results in NRML file are correct.
        """
        if hazengine.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == \
            'true':
            nrml_path = os.path.join(
                "demos/classical_psha_simple/computed_output",
                hazengine.mean_hazard_curve_filename())

            LOG.debug("validating NRML file %s" % nrml_path)

            self.assertTrue(xml.validates_against_xml_schema(
                nrml_path, NRML_SCHEMA_PATH),
                "NRML instance file %s does not validate against schema" \
                % nrml_path)

    def verify_mean_haz_maps_stored_to_nrml(hazengine):
        """Tests that a mean hazard map NRML file has been written,
        and that this file validates against the NRML schema.
        Does NOT test if results in NRML file are correct.
        """
        if (hazengine.params[classical_psha.POES_PARAM_NAME] != '' and
            hazengine.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == \
            'true'):
            for poe in hazengine.poes_hazard_maps:
                nrml_path = os.path.join(
                    "demos/classical_psha_simple/computed_output",
                    hazengine.mean_hazard_map_filename(poe))

                LOG.debug("validating NRML file for mean hazard map %s" \
                    % nrml_path)

                self.assertTrue(xml.validates_against_xml_schema(
                    nrml_path, NRML_SCHEMA_PATH),
                    "NRML instance file %s does not validate against "\
                    "schema" % nrml_path)

    def verify_quantile_haz_curves_stored_to_nrml(hazengine):
        """Tests that quantile hazard curve NRML files have been written,
        and that these file validate against the NRML schema.
        Does NOT test if results in NRML files are correct.
        """
        for quantile in hazengine.quantile_levels:
            nrml_path = os.path.join(
                "demos/classical_psha_simple/computed_output",
                hazengine.quantile_hazard_curve_filename(quantile))

            LOG.debug("validating NRML file for quantile hazard curve: "\
                "%s" % nrml_path)

            self.assertTrue(xml.validates_against_xml_schema(
                nrml_path, NRML_SCHEMA_PATH),
                "NRML instance file %s does not validate against schema" \
                % nrml_path)

    def verify_quantile_haz_maps_stored_to_nrml(hazengine):
        """Tests that quantile hazard map NRML files have been written,
        and that these file validate against the NRML schema.
        Does NOT test if results in NRML files are correct.
        """
        quantiles = hazengine.quantile_levels

        if (hazengine.params[classical_psha.POES_PARAM_NAME] != '' and
            len(quantiles) > 0):
            for poe in hazengine.poes_hazard_maps:
                for quantile in quantiles:
                    nrml_path = os.path.join(
                        "demos/classical_psha_simple/computed_output",
                        hazengine.quantile_hazard_map_filename(quantile,
                                                               poe))

                    LOG.debug("validating NRML file for quantile hazard "\
                        "map: %s" % nrml_path)

                    self.assertTrue(xml.validates_against_xml_schema(
                        nrml_path, NRML_SCHEMA_PATH),
                        "NRML instance file %s does not validate against "\
                        "schema" % nrml_path)

    # Build the hazard engine and run the calculation under test.
    hazengine = helpers.job_from_file(
        helpers.testdata_path("classical_psha_simple/config.gem"))

    # The mixin supplies the classical PSHA execution machinery.
    with mixins.Mixin(hazengine, openquake.hazard.job.HazJobMixin):
        used_keys = []
        hazengine.execute(used_keys)

        verify_realization_haz_curves_stored_to_kvs(hazengine, used_keys)
        verify_realization_haz_curves_stored_to_nrml(hazengine)

        # hazard curves: check results of mean and quantile computation
        verify_mean_haz_curves_stored_to_kvs(hazengine, used_keys)
        verify_quantile_haz_curves_stored_to_kvs(hazengine, used_keys)

        verify_mean_haz_curves_stored_to_nrml(hazengine)
        verify_quantile_haz_curves_stored_to_nrml(hazengine)

        # hazard maps: check results of mean and quantile computation
        verify_mean_haz_maps_stored_to_kvs(hazengine, used_keys)
        verify_quantile_haz_maps_stored_to_kvs(hazengine, used_keys)

        verify_mean_haz_maps_stored_to_nrml(hazengine)
        verify_quantile_haz_maps_stored_to_nrml(hazengine)
""" import math import unittest from tests.utils import helpers from tests.utils.helpers import patch from openquake import engine from openquake import java from openquake import kvs from openquake import shapes from openquake.engine import JobContext from openquake.calculators.hazard.scenario import core as scenario SCENARIO_SMOKE_TEST = helpers.testdata_path("scenario/config.gem") NUMBER_OF_CALC_KEY = "NUMBER_OF_GROUND_MOTION_FIELDS_CALCULATIONS" def compute_ground_motion_field(self, _random_generator): """Stubbed version of the method that computes the ground motion field calling java stuff.""" hashmap = java.jclass("HashMap")() for site in self.job_ctxt.sites_to_compute(): location = java.jclass("Location")(site.latitude, site.longitude) site = java.jclass("Site")(location) hashmap.put(site, 0.5) return hashmap
def test_generate_hazard_curves_using_classical_psha(self):
    """Run the classical PSHA calculator end to end and verify that
    realization, mean and quantile hazard curves/maps are written both
    to the KVS and as schema-valid NRML files."""

    def verify_realization_haz_curves_stored_to_kvs(the_job, keys):
        """ This just tests to make sure there something in the KVS
        for each key in given list of keys. This does NOT test the
        actual results. """
        # TODO (LB): At some point we need to test the actual
        # results to verify they are correct

        realizations = int(the_job.params['NUMBER_OF_LOGIC_TREE_SAMPLES'])

        for realization in xrange(0, realizations):
            for site in the_job.sites_to_compute():
                key = tokens.hazard_curve_poes_key(the_job.job_id,
                                                   realization, site)
                self.assertTrue(key in keys, "Missing key %s" % key)

    def verify_mean_haz_curves_stored_to_kvs(the_job, keys):
        """ Make sure that the keys and non-empty values for mean
        hazard curves have been written to KVS."""
        if the_job.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == 'true':
            LOG.debug("verifying KVS entries for mean hazard curves")
            for site in the_job.sites_to_compute():
                key = tokens.mean_hazard_curve_key(the_job.job_id, site)
                self.assertTrue(key in keys, "Missing key %s" % key)

    def verify_mean_haz_maps_stored_to_kvs(the_job, calculator, keys):
        """ Make sure that the keys and non-empty values for mean
        hazard maps have been written to KVS."""
        # Maps are only expected when PoEs were supplied AND the mean
        # curve computation is switched on.
        if (the_job.params[hazard_general.POES_PARAM_NAME] != '' and
            the_job.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == \
            'true'):
            LOG.debug("verifying KVS entries for mean hazard maps")
            for poe in calculator.poes_hazard_maps:
                for site in the_job.sites_to_compute():
                    key = tokens.mean_hazard_map_key(
                        the_job.job_id, site, poe)
                    self.assertTrue(key in keys, "Missing key %s" % key)

    def verify_quantile_haz_curves_stored_to_kvs(the_job, calculator,
                                                 keys):
        """ Make sure that the keys and non-empty values for quantile
        hazard curves have been written to KVS."""
        quantiles = calculator.quantile_levels

        LOG.debug("verifying KVS entries for quantile hazard curves, "\
            "%s quantile values" % len(quantiles))

        for quantile in quantiles:
            for site in the_job.sites_to_compute():
                key = tokens.quantile_hazard_curve_key(
                    the_job.job_id, site, quantile)
                self.assertTrue(key in keys, "Missing key %s" % key)

    def verify_quantile_haz_maps_stored_to_kvs(the_job, calculator,
                                               keys):
        """ Make sure that the keys and non-empty values for quantile
        hazard maps have been written to KVS."""
        quantiles = calculator.quantile_levels

        if (the_job.params[hazard_general.POES_PARAM_NAME] != '' and
            len(quantiles) > 0):
            poes = calculator.poes_hazard_maps

            LOG.debug("verifying KVS entries for quantile hazard maps, "\
                "%s quantile values, %s PoEs" % (
                len(quantiles), len(poes)))

            for quantile in quantiles:
                for poe in poes:
                    for site in the_job.sites_to_compute():
                        key = tokens.quantile_hazard_map_key(
                            the_job.job_id, site, poe, quantile)
                        self.assertTrue(key in keys,
                                        "Missing key %s" % key)

    def verify_realization_haz_curves_stored_to_nrml(the_job,
                                                     calculator):
        """Tests that a NRML file has been written for each realization,
        and that this file validates against the NRML schema.
        Does NOT test if results in NRML file are correct.
        """
        realizations = int(the_job.params['NUMBER_OF_LOGIC_TREE_SAMPLES'])
        for realization in xrange(0, realizations):
            nrml_path = os.path.join(
                "demos/classical_psha_simple/computed_output",
                calculator.hazard_curve_filename(realization))

            LOG.debug("validating NRML file %s" % nrml_path)

            self.assertTrue(xml.validates_against_xml_schema(
                nrml_path, NRML_SCHEMA_PATH),
                "NRML instance file %s does not validate against schema" \
                % nrml_path)

    def verify_mean_haz_curves_stored_to_nrml(the_job, calculator):
        """Tests that a mean hazard curve NRML file has been written,
        and that this file validates against the NRML schema.
        Does NOT test if results in NRML file are correct.
        """
        if the_job.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == 'true':
            nrml_path = os.path.join(
                "demos/classical_psha_simple/computed_output",
                calculator.mean_hazard_curve_filename())

            LOG.debug("validating NRML file %s" % nrml_path)

            self.assertTrue(xml.validates_against_xml_schema(
                nrml_path, NRML_SCHEMA_PATH),
                "NRML instance file %s does not validate against schema" \
                % nrml_path)

    def verify_mean_haz_maps_stored_to_nrml(the_job):
        """Tests that a mean hazard map NRML file has been written,
        and that this file validates against the NRML schema.
        Does NOT test if results in NRML file are correct.
        """
        # NOTE(review): `calculator` is not a parameter here — it is
        # resolved from the enclosing test scope, where it is defined
        # before this helper is called. Confirm this is intentional.
        if (the_job.params[hazard_general.POES_PARAM_NAME] != '' and
            the_job.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == \
            'true'):
            for poe in calculator.poes_hazard_maps:
                nrml_path = os.path.join(
                    "demos/classical_psha_simple/computed_output",
                    calculator.mean_hazard_map_filename(poe))

                LOG.debug("validating NRML file for mean hazard map %s" \
                    % nrml_path)

                self.assertTrue(xml.validates_against_xml_schema(
                    nrml_path, NRML_SCHEMA_PATH),
                    "NRML instance file %s does not validate against "\
                    "schema" % nrml_path)

    def verify_quantile_haz_curves_stored_to_nrml(the_job, calculator):
        """Tests that quantile hazard curve NRML files have been written,
        and that these file validate against the NRML schema.
        Does NOT test if results in NRML files are correct.
        """
        for quantile in calculator.quantile_levels:
            nrml_path = os.path.join(
                "demos/classical_psha_simple/computed_output",
                calculator.quantile_hazard_curve_filename(quantile))

            LOG.debug("validating NRML file for quantile hazard curve: "\
                "%s" % nrml_path)

            self.assertTrue(xml.validates_against_xml_schema(
                nrml_path, NRML_SCHEMA_PATH),
                "NRML instance file %s does not validate against schema" \
                % nrml_path)

    def verify_quantile_haz_maps_stored_to_nrml(the_job, calculator):
        """Tests that quantile hazard map NRML files have been written,
        and that these file validate against the NRML schema.
        Does NOT test if results in NRML files are correct.
        """
        quantiles = calculator.quantile_levels

        if (the_job.params[hazard_general.POES_PARAM_NAME] != '' and
            len(quantiles) > 0):
            for poe in calculator.poes_hazard_maps:
                for quantile in quantiles:
                    nrml_path = os.path.join(
                        "demos/classical_psha_simple/computed_output",
                        calculator.quantile_hazard_map_filename(
                            quantile, poe))

                    LOG.debug("validating NRML file for quantile hazard "\
                        "map: %s" % nrml_path)

                    self.assertTrue(xml.validates_against_xml_schema(
                        nrml_path, NRML_SCHEMA_PATH),
                        "NRML instance file %s does not validate against "\
                        "schema" % nrml_path)

    # Set up and run the classical PSHA calculation under test.
    base_path = helpers.testdata_path("classical_psha_simple")
    path = helpers.testdata_path("classical_psha_simple/config.gem")
    job = engine.prepare_job()
    job_profile, params, sections = engine.import_job_profile(path, job)

    the_job = JobContext(params, job.id, sections=sections,
                         base_path=base_path,
                         serialize_results_to=['db', 'xml'],
                         oq_job_profile=job_profile, oq_job=job)
    the_job.to_kvs()

    calc_mode = job_profile.calc_mode
    calculator = CALCULATORS[calc_mode](the_job)

    used_keys = []
    calculator.execute(used_keys)

    verify_realization_haz_curves_stored_to_kvs(the_job, used_keys)
    verify_realization_haz_curves_stored_to_nrml(the_job, calculator)

    # hazard curves: check results of mean and quantile computation
    verify_mean_haz_curves_stored_to_kvs(the_job, used_keys)
    verify_quantile_haz_curves_stored_to_kvs(the_job, calculator,
                                             used_keys)

    verify_mean_haz_curves_stored_to_nrml(the_job, calculator)
    verify_quantile_haz_curves_stored_to_nrml(the_job, calculator)

    # hazard maps: check results of mean and quantile computation
    verify_mean_haz_maps_stored_to_kvs(the_job, calculator, used_keys)
    verify_quantile_haz_maps_stored_to_kvs(the_job, calculator,
                                           used_keys)

    verify_mean_haz_maps_stored_to_nrml(the_job)
    verify_quantile_haz_maps_stored_to_nrml(the_job, calculator)