def setUp(self):
    inputs = [("fragility", ""), ("exposure", "")]
    self.job = self.setup_classic_job(inputs=inputs)

    kvs.mark_job_as_current(self.job.id)
    kvs.cache_gc(self.job.id)

    self.site = Site(1.0, 1.0)
    block = Block(self.job.id, BLOCK_ID, [self.site])
    block.to_kvs()

    # this region contains a single site, the one with
    # longitude == 1.0 and latitude == 1.0
    params = {"REGION_VERTEX": "1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0",
              "REGION_GRID_SPACING": "0.5",
              "BASE_PATH": ".",
              "OUTPUT_DIR": "."}

    self.job_ctxt = JobContext(params, self.job.id, oq_job=self.job)

    self.em = self._store_em()
    self._store_gmvs([0.40, 0.30, 0.45, 0.35, 0.40])

    self.calculator = ScenarioDamageRiskCalculator(self.job_ctxt)

    # just stubbing out some preprocessing stuff...
    ScenarioDamageRiskCalculator.store_exposure_assets = lambda self: None
    ScenarioDamageRiskCalculator.store_fragility_model = lambda self: None
    ScenarioDamageRiskCalculator.partition = lambda self: None
def setUp(self):
    kvs.get_client().flushall()

    base_path = helpers.testdata_path("scenario")
    job = engine.prepare_job()
    self.job_profile, self.params, self.sections = (
        engine.import_job_profile(SCENARIO_SMOKE_TEST, job))
    self.job_ctxt = JobContext(
        self.params, job.id, sections=self.sections, base_path=base_path,
        oq_job_profile=self.job_profile, oq_job=job)
    self.job_ctxt.params[NUMBER_OF_CALC_KEY] = "1"
    self.job_ctxt.params['SERIALIZE_RESULTS_TO'] = 'xml'
    self.job_ctxt.serialize_results_to = ["xml"]

    # saving the default java implementation
    self.default = (
        scenario.ScenarioHazardCalculator.compute_ground_motion_field)

    self.grid = self.job_ctxt.region.grid

    self.job_ctxt.to_kvs()
def setUp(self):
    client = kvs.get_client()

    # Delete managed job id info so we can predict the job key
    # which will be allocated for us
    client.delete(kvs.tokens.CURRENT_JOBS)

    self.generated_files = []

    job = engine.prepare_job()
    jp, params, sections = import_job_profile(
        helpers.get_data_path(CONFIG_FILE), job)
    self.job_ctxt = JobContext(
        params, job.id, sections=sections, oq_job_profile=jp, oq_job=job)

    job = engine.prepare_job()
    jp, params, sections = import_job_profile(
        helpers.get_data_path(CONFIG_WITH_INCLUDES), job)
    self.job_ctxt_with_includes = JobContext(
        params, job.id, sections=sections, oq_job_profile=jp, oq_job=job)
def setUp(self):
    # Test 'event-based' job
    cfg_path = helpers.testdata_path("simplecase/config.gem")
    base_path = helpers.testdata_path("simplecase")

    oq_job = engine.prepare_job()
    oq_job_profile, params, sections = engine.import_job_profile(
        cfg_path, oq_job)

    self.eb_job = JobContext(
        params, oq_job.id, sections=sections, base_path=base_path,
        oq_job_profile=oq_job_profile, oq_job=oq_job)
def create_job(params, **kwargs):
    """Return a JobContext built from the given params; the job id
    defaults to 0 unless a 'job_id' keyword argument is supplied."""
    job_id = kwargs.pop('job_id', 0)

    return JobContext(params, job_id, **kwargs)
def test_generate_hazard_curves_using_classical_psha(self):

    def verify_realization_haz_curves_stored_to_kvs(the_job, keys):
        """This just tests to make sure there is something in the KVS
        for each key in the given list of keys. This does NOT test the
        actual results."""
        # TODO (LB): At some point we need to test the actual
        # results to verify they are correct

        realizations = int(
            the_job.params['NUMBER_OF_LOGIC_TREE_SAMPLES'])

        for realization in xrange(0, realizations):
            for site in the_job.sites_to_compute():
                key = tokens.hazard_curve_poes_key(
                    the_job.job_id, realization, site)
                self.assertTrue(key in keys, "Missing key %s" % key)

    def verify_mean_haz_curves_stored_to_kvs(the_job, keys):
        """Make sure that the keys and non-empty values for mean
        hazard curves have been written to KVS."""

        if the_job.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == 'true':
            LOG.debug("verifying KVS entries for mean hazard curves")

            for site in the_job.sites_to_compute():
                key = tokens.mean_hazard_curve_key(the_job.job_id, site)
                self.assertTrue(key in keys, "Missing key %s" % key)

    def verify_mean_haz_maps_stored_to_kvs(the_job, calculator, keys):
        """Make sure that the keys and non-empty values for mean
        hazard maps have been written to KVS."""

        if (the_job.params[hazard_general.POES_PARAM_NAME] != '' and
            the_job.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == 'true'):

            LOG.debug("verifying KVS entries for mean hazard maps")

            for poe in calculator.poes_hazard_maps:
                for site in the_job.sites_to_compute():
                    key = tokens.mean_hazard_map_key(
                        the_job.job_id, site, poe)
                    self.assertTrue(key in keys, "Missing key %s" % key)

    def verify_quantile_haz_curves_stored_to_kvs(the_job, calculator, keys):
        """Make sure that the keys and non-empty values for quantile
        hazard curves have been written to KVS."""

        quantiles = calculator.quantile_levels

        LOG.debug("verifying KVS entries for quantile hazard curves, "
                  "%s quantile values" % len(quantiles))

        for quantile in quantiles:
            for site in the_job.sites_to_compute():
                key = tokens.quantile_hazard_curve_key(
                    the_job.job_id, site, quantile)
                self.assertTrue(key in keys, "Missing key %s" % key)

    def verify_quantile_haz_maps_stored_to_kvs(the_job, calculator, keys):
        """Make sure that the keys and non-empty values for quantile
        hazard maps have been written to KVS."""

        quantiles = calculator.quantile_levels

        if (the_job.params[hazard_general.POES_PARAM_NAME] != '' and
            len(quantiles) > 0):

            poes = calculator.poes_hazard_maps

            LOG.debug("verifying KVS entries for quantile hazard maps, "
                      "%s quantile values, %s PoEs" % (
                      len(quantiles), len(poes)))

            for quantile in quantiles:
                for poe in poes:
                    for site in the_job.sites_to_compute():
                        key = tokens.quantile_hazard_map_key(
                            the_job.job_id, site, poe, quantile)
                        self.assertTrue(
                            key in keys, "Missing key %s" % key)

    def verify_realization_haz_curves_stored_to_nrml(the_job, calculator):
        """Tests that a NRML file has been written for each realization,
        and that this file validates against the NRML schema.
        Does NOT test if results in the NRML file are correct."""
        realizations = int(
            the_job.params['NUMBER_OF_LOGIC_TREE_SAMPLES'])

        for realization in xrange(0, realizations):
            nrml_path = os.path.join(
                "demos/classical_psha_simple/computed_output",
                calculator.hazard_curve_filename(realization))

            LOG.debug("validating NRML file %s" % nrml_path)

            self.assertTrue(
                xml.validates_against_xml_schema(
                    nrml_path, NRML_SCHEMA_PATH),
                "NRML instance file %s does not validate against schema"
                % nrml_path)

    def verify_mean_haz_curves_stored_to_nrml(the_job, calculator):
        """Tests that a mean hazard curve NRML file has been written,
        and that this file validates against the NRML schema.
        Does NOT test if results in the NRML file are correct."""
        if the_job.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == 'true':
            nrml_path = os.path.join(
                "demos/classical_psha_simple/computed_output",
                calculator.mean_hazard_curve_filename())

            LOG.debug("validating NRML file %s" % nrml_path)

            self.assertTrue(
                xml.validates_against_xml_schema(
                    nrml_path, NRML_SCHEMA_PATH),
                "NRML instance file %s does not validate against schema"
                % nrml_path)

    def verify_mean_haz_maps_stored_to_nrml(the_job):
        """Tests that a mean hazard map NRML file has been written,
        and that this file validates against the NRML schema.
        Does NOT test if results in the NRML file are correct."""
        if (the_job.params[hazard_general.POES_PARAM_NAME] != '' and
            the_job.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == 'true'):

            for poe in calculator.poes_hazard_maps:
                nrml_path = os.path.join(
                    "demos/classical_psha_simple/computed_output",
                    calculator.mean_hazard_map_filename(poe))

                LOG.debug("validating NRML file for mean hazard map %s"
                          % nrml_path)

                self.assertTrue(
                    xml.validates_against_xml_schema(
                        nrml_path, NRML_SCHEMA_PATH),
                    "NRML instance file %s does not validate against "
                    "schema" % nrml_path)

    def verify_quantile_haz_curves_stored_to_nrml(the_job, calculator):
        """Tests that quantile hazard curve NRML files have been written,
        and that these files validate against the NRML schema.
        Does NOT test if results in the NRML files are correct."""
        for quantile in calculator.quantile_levels:
            nrml_path = os.path.join(
                "demos/classical_psha_simple/computed_output",
                calculator.quantile_hazard_curve_filename(quantile))

            LOG.debug("validating NRML file for quantile hazard curve: "
                      "%s" % nrml_path)

            self.assertTrue(
                xml.validates_against_xml_schema(
                    nrml_path, NRML_SCHEMA_PATH),
                "NRML instance file %s does not validate against schema"
                % nrml_path)

    def verify_quantile_haz_maps_stored_to_nrml(the_job, calculator):
        """Tests that quantile hazard map NRML files have been written,
        and that these files validate against the NRML schema.
        Does NOT test if results in the NRML files are correct."""
        quantiles = calculator.quantile_levels

        if (the_job.params[hazard_general.POES_PARAM_NAME] != '' and
            len(quantiles) > 0):

            for poe in calculator.poes_hazard_maps:
                for quantile in quantiles:
                    nrml_path = os.path.join(
                        "demos/classical_psha_simple/computed_output",
                        calculator.quantile_hazard_map_filename(
                            quantile, poe))

                    LOG.debug("validating NRML file for quantile hazard "
                              "map: %s" % nrml_path)

                    self.assertTrue(
                        xml.validates_against_xml_schema(
                            nrml_path, NRML_SCHEMA_PATH),
                        "NRML instance file %s does not validate against "
                        "schema" % nrml_path)

    base_path = helpers.testdata_path("classical_psha_simple")
    path = helpers.testdata_path("classical_psha_simple/config.gem")
    job = engine.prepare_job()
    job_profile, params, sections = engine.import_job_profile(path, job)

    the_job = JobContext(
        params, job.id, sections=sections, base_path=base_path,
        serialize_results_to=['db', 'xml'], oq_job_profile=job_profile,
        oq_job=job)
    the_job.to_kvs()

    calc_mode = job_profile.calc_mode
    calculator = CALCULATORS[calc_mode](the_job)

    used_keys = []
    calculator.execute(used_keys)

    verify_realization_haz_curves_stored_to_kvs(the_job, used_keys)
    verify_realization_haz_curves_stored_to_nrml(the_job, calculator)

    # hazard curves: check results of mean and quantile computation
    verify_mean_haz_curves_stored_to_kvs(the_job, used_keys)
    verify_quantile_haz_curves_stored_to_kvs(the_job, calculator, used_keys)

    verify_mean_haz_curves_stored_to_nrml(the_job, calculator)
    verify_quantile_haz_curves_stored_to_nrml(the_job, calculator)

    # hazard maps: check results of mean and quantile computation
    verify_mean_haz_maps_stored_to_kvs(the_job, calculator, used_keys)
    verify_quantile_haz_maps_stored_to_kvs(the_job, calculator, used_keys)

    verify_mean_haz_maps_stored_to_nrml(the_job)
    verify_quantile_haz_maps_stored_to_nrml(the_job, calculator)