def _store_input_parameters(params, calc_mode, job_profile):
    """Store parameters in uiapi.oq_job_profile columns.

    :param dict params: config parameter name -> raw string value
    :param str calc_mode: calculation mode, used to select which
        parameter defaults apply
    :param job_profile: the job profile object whose columns are set
        (mutated in place; the caller is responsible for saving it)
    """
    # First apply the defaults for every parameter relevant to this
    # calculation mode ...
    for name, param in PARAMS.items():
        if calc_mode in param.modes and param.default is not None:
            setattr(job_profile, param.column, param.default)

    # ... then overwrite them with the values actually supplied,
    # converting each raw string to the column's type.
    for name, value in params.items():
        param = PARAMS[name]
        value = value.strip()

        if param.type in (models.BooleanField, models.NullBooleanField):
            # Everything except '0'/'false' (case-insensitive) is True.
            value = value.lower() not in ('0', 'false')
        elif param.type == models.PolygonField:
            ewkt = shapes.polygon_ewkt_from_coords(value)
            value = GEOSGeometry(ewkt)
        elif param.type == models.MultiPointField:
            ewkt = shapes.multipoint_ewkt_from_coords(value)
            value = GEOSGeometry(ewkt)
        elif param.type == FloatArrayField:
            value = [float(v) for v in ARRAY_RE.split(value) if len(v)]
        elif param.type == CharArrayField:
            if param.to_db is not None:
                value = param.to_db(value)
            value = [str(v) for v in ARRAY_RE.split(value) if len(v)]
        elif param.to_db is not None:
            value = param.to_db(value)
        elif param.type is None:
            # BUGFIX: was `param.type == None` (PEP 8 E711); identity
            # comparison is the correct test here.  A typeless parameter
            # has no corresponding column: skip it.
            continue

        setattr(job_profile, param.column, value)

    # Period/damping only make sense for spectral acceleration.
    if job_profile.imt != 'sa':
        job_profile.period = None
        job_profile.damping = None
def test__serialize_gmf(self):
    # Ground motion fields produced by the calculator must be
    # serialized to one XML file per calculation.
    gmf_map = java.jclass("HashMap")()
    for (lon, lat), gmv in (((1.0, 2.0), 0.1), ((1.1, 2.1), 0.2)):
        jsite = java.jclass("Site")(java.jclass("Location")(lon, lat))
        gmf_map.put(jsite, gmv)

    self.job_ctxt.params[NUMBER_OF_CALC_KEY] = "2"
    self.job_ctxt.params["SAVE_GMFS"] = "true"
    self.job_ctxt.params["REGION_VERTEX"] = (
        "0.0, 0.0, 0.0, 3.0, 3.0, 3.0, 3.0, 0.0")
    self.job_profile.region = GEOSGeometry(
        shapes.polygon_ewkt_from_coords(
            '0.0, 0.0, 0.0, 3.0, 3.0, 3.0, 3.0, 0.0'))
    self.job_profile.gmf_calculation_number = 2
    self.job_profile.save()

    calc = scenario.ScenarioHazardCalculator(self.job_ctxt)
    mock_target = ('openquake.calculators.hazard.scenario.core'
                   '.ScenarioHazardCalculator'
                   '.compute_ground_motion_field')
    with patch(mock_target) as gmf_mock:
        # The mocked method must hand back a Java HashMap.
        gmf_mock.return_value = gmf_map
        calc.execute()

    template = os.path.join(self.job_ctxt.base_path,
                            self.job_ctxt['OUTPUT_DIR'], "gmf-%s.xml")
    for idx in range(self.job_profile.gmf_calculation_number):
        gmf_path = template % idx
        self.assertTrue(os.path.isfile(gmf_path),
                        "GMF file not found (%s)" % gmf_path)
def test__serialize_gmf(self):
    # Verify that GMFs get written out as XML files, one per
    # ground motion field calculation.
    loc_a = java.jclass("Location")(1.0, 2.0)
    loc_b = java.jclass("Location")(1.1, 2.1)
    gmf_map = java.jclass("HashMap")()
    gmf_map.put(java.jclass("Site")(loc_a), 0.1)
    gmf_map.put(java.jclass("Site")(loc_b), 0.2)

    self.job_ctxt.params[NUMBER_OF_CALC_KEY] = "2"
    self.job_ctxt.params["SAVE_GMFS"] = "true"
    self.job_ctxt.params["REGION_VERTEX"] = (
        "0.0, 0.0, 0.0, 3.0, 3.0, 3.0, 3.0, 0.0")
    self.job_profile.region = GEOSGeometry(
        shapes.polygon_ewkt_from_coords(
            '0.0, 0.0, 0.0, 3.0, 3.0, 3.0, 3.0, 0.0'))
    self.job_profile.gmf_calculation_number = 2
    self.job_profile.save()

    calc = scenario.ScenarioHazardCalculator(self.job_ctxt)
    target = ('openquake.calculators.hazard.scenario.core'
              '.ScenarioHazardCalculator'
              '.compute_ground_motion_field')
    with patch(target) as compute_gmf_mock:
        # The return value needs to be a Java HashMap.
        compute_gmf_mock.return_value = gmf_map
        calc.execute()

    fmt = os.path.join(self.job_ctxt.base_path,
                       self.job_ctxt['OUTPUT_DIR'], "gmf-%s.xml")
    for calc_no in range(self.job_profile.gmf_calculation_number):
        expected = fmt % calc_no
        self.assertTrue(os.path.isfile(expected),
                        "GMF file not found (%s)" % expected)
def test_compute_bcr_in_the_classical_psha_calculator(self):
    # End-to-end check: compute_risk in BCR mode stores the expected
    # benefit/cost-ratio result for the test asset in the KVS.
    self._compute_risk_classical_psha_setup()
    helpers.delete_profile(self.job)
    bcr_config = helpers.demo_file('benefit_cost_ratio/config.gem')
    job_profile, params, sections = engine.import_job_profile(
        bcr_config, self.job)

    # We need to adjust a few of the parameters for this test:
    job_profile.imls = [
        0.005, 0.007, 0.0098, 0.0137, 0.0192, 0.0269, 0.0376, 0.0527,
        0.0738, 0.103, 0.145, 0.203, 0.284, 0.397, 0.556, 0.778]
    # Keep the flat params dict and the profile object in sync.
    params['ASSET_LIFE_EXPECTANCY'] = '50'
    job_profile.asset_life_expectancy = 50
    params['REGION_VERTEX'] = '0.0, 0.0, 0.0, 2.0, 2.0, 2.0, 2.0, 0.0'
    job_profile.region = GEOSGeometry(shapes.polygon_ewkt_from_coords(
        params['REGION_VERTEX']))
    job_profile.save()

    job_ctxt = engine.JobContext(
        params, self.job_id, sections=sections,
        oq_job_profile=job_profile)

    calculator = classical_core.ClassicalRiskCalculator(job_ctxt)

    # Ensure an exposure model with the test asset ("rubcr") exists,
    # creating model and asset only if they are not already in the db.
    [input] = models.inputs4job(self.job.id, input_type="exposure")
    emdl = input.model()
    if not emdl:
        emdl = models.ExposureModel(
            owner=self.job.owner, input=input,
            description="c-psha test exposure model",
            category="c-psha power plants", stco_unit="watt",
            stco_type="aggregated", reco_unit="joule",
            reco_type="aggregated")
        emdl.save()

    assets = emdl.exposuredata_set.filter(asset_ref="rubcr")
    if not assets:
        asset = models.ExposureData(exposure_model=emdl, taxonomy="ID",
                                    asset_ref="rubcr", stco=1,
                                    reco=123.45,
                                    site=GEOSGeometry("POINT(1.0 1.0)"))
        asset.save()

    Block.from_kvs(self.job_id, self.block_id)
    calculator.compute_risk(self.block_id)

    # Fetch the BCR result that compute_risk stored in the KVS and
    # compare it (with float tolerance) against the expected payload.
    result_key = kvs.tokens.bcr_block_key(self.job_id, self.block_id)
    res = kvs.get_value_json_decoded(result_key)
    expected_result = {'bcr': 0.0, 'eal_original': 0.003032,
                       'eal_retrofitted': 0.003032}
    helpers.assertDeepAlmostEqual(
        self, res, [[[1, 1], [[expected_result, "rubcr"]]]])
def test_polygon_ewkt(self):
    """
    Typical usage of
    :py:function:`openquake.shapes.polygon_ewkt_from_coords`.
    """
    # Input is a flat 'lat, lon' coordinate string; the EWKT output
    # uses 'lon lat' order and repeats the first vertex at the end to
    # form a closed loop.
    self.assertEqual(
        'SRID=4326;POLYGON((-122.0 38.113, -122.114 38.113, '
        '-122.57 38.111, -122.0 38.113))',
        shapes.polygon_ewkt_from_coords(
            '38.113, -122.0, 38.113, -122.114, 38.111, -122.57'))
def test_polygon_ewkt(self):
    """
    Test typical usage of
    :py:function:`openquake.shapes.polygon_ewkt_from_coords`.
    """
    lat_lon_pairs = '38.113, -122.0, 38.113, -122.114, 38.111, -122.57'
    # The first & last coord are the same so the ring is closed.
    want = ('SRID=4326;POLYGON((-122.0 38.113, -122.114 38.113, '
            '-122.57 38.111, -122.0 38.113))')
    got = shapes.polygon_ewkt_from_coords(lat_lon_pairs)
    self.assertEqual(want, got)
def test_compute_bcr(self):
    # End-to-end check of the event-based BCR path: compute_risk must
    # store a benefit/cost-ratio result for the test asset in the KVS.
    cfg_path = helpers.demo_file(
        'probabilistic_event_based_risk/config.gem')
    helpers.delete_profile(self.job)
    job_profile, params, sections = engine.import_job_profile(
        cfg_path, self.job)

    # Switch the demo profile over to BCR mode and set the parameters
    # this test depends on.
    job_profile.calc_mode = 'event_based_bcr'
    job_profile.interest_rate = 0.05
    job_profile.asset_life_expectancy = 50
    job_profile.region = GEOSGeometry(shapes.polygon_ewkt_from_coords(
        '0.0, 0.0, 0.0, 2.0, 2.0, 2.0, 2.0, 0.0'))
    job_profile.region_grid_spacing = 0.1
    job_profile.maximum_distance = 200.0
    job_profile.gmf_random_seed = None
    job_profile.save()

    # Mirror the profile changes in the flat params dict so both views
    # of the configuration agree.
    params.update(dict(CALCULATION_MODE='Event Based BCR',
                       INTEREST_RATE='0.05',
                       ASSET_LIFE_EXPECTANCY='50',
                       MAXIMUM_DISTANCE='200.0',
                       REGION_VERTEX=('0.0, 0.0, 0.0, 2.0, '
                                      '2.0, 2.0, 2.0, 0.0'),
                       REGION_GRID_SPACING='0.1'))

    job_ctxt = engine.JobContext(
        params, self.job_id, sections=sections,
        oq_job_profile=job_profile)

    calculator = eb_core.EventBasedRiskCalculator(job_ctxt)

    # One single-site block plus one asset at that site.
    self.block_id = 7
    SITE = shapes.Site(1.0, 1.0)
    block = Block(self.job_id, self.block_id, (SITE, ))
    block.to_kvs()

    location = GEOSGeometry(SITE.point.to_wkt())
    asset = models.ExposureData(exposure_model=self.emdl, taxonomy="ID",
                                asset_ref=22.61, stco=1, reco=123.45,
                                site=location)
    asset.save()

    calculator.compute_risk(self.block_id)

    # The BCR payload is written to the KVS; compare with tolerance.
    result_key = kvs.tokens.bcr_block_key(self.job_id, self.block_id)
    result = kvs.get_value_json_decoded(result_key)
    expected_result = {'bcr': 0.0, 'eal_original': 0.0,
                       'eal_retrofitted': 0.0}
    helpers.assertDeepAlmostEqual(
        self, [[[1, 1], [[expected_result, "22.61"]]]], result)
def test_polygon_ewkt_round_float(self):
    """
    Usage of :py:function:`openquake.shapes.polygon_ewkt_from_coords`
    with high-precision coordinate values, which should be rounded
    down to a reasonable level of precision.
    """
    # -122.00000001 rounds to -122.0, 38.11100006 to 38.1110001; the
    # first & last coord are the same so the ring is closed.
    self.assertEqual(
        'SRID=4326;POLYGON((-122.0 38.113, -122.114 38.113, '
        '-122.57 38.1110001, -122.0 38.113))',
        shapes.polygon_ewkt_from_coords(
            '38.113, -122.00000001, 38.113, -122.114, '
            '38.11100006, -122.57'))
def test_polygon_ewkt_round_float(self):
    """
    Ensure :py:function:`openquake.shapes.polygon_ewkt_from_coords`
    rounds high-precision coordinate values down to a reasonable
    level of precision.
    """
    raw_coords = (
        '38.113, -122.00000001, 38.113, -122.114, 38.11100006, -122.57')
    # Note the rounded values and the repeated first/last vertex that
    # closes the polygon ring.
    want = ('SRID=4326;POLYGON((-122.0 38.113, -122.114 38.113, '
            '-122.57 38.1110001, -122.0 38.113))')
    got = shapes.polygon_ewkt_from_coords(raw_coords)
    self.assertEqual(want, got)
def test_compute_risk_in_the_classical_psha_calculator(self):
    """
    tests ClassicalRiskCalculator.compute_risk by retrieving
    all the loss curves in the kvs and checks their presence
    """
    helpers.delete_profile(self.job)
    cls_risk_cfg = helpers.demo_file(
        'classical_psha_based_risk/config.gem')
    job_profile, params, sections = engine.import_job_profile(
        cls_risk_cfg, self.job)

    # We need to adjust a few of the parameters for this test:
    # keep the flat params dict and the profile object in sync.
    params['REGION_VERTEX'] = '0.0, 0.0, 0.0, 2.0, 2.0, 2.0, 2.0, 0.0'
    job_profile.region = GEOSGeometry(shapes.polygon_ewkt_from_coords(
        params['REGION_VERTEX']))
    job_profile.save()

    job_ctxt = engine.JobContext(
        params, self.job_id, sections=sections,
        oq_job_profile=job_profile)

    self._compute_risk_classical_psha_setup()

    calculator = classical_core.ClassicalRiskCalculator(job_ctxt)
    # Inject the fixture vulnerability curve for taxonomy "ID".
    calculator.vuln_curves = {"ID": self.vuln_function}

    block = Block.from_kvs(self.job_id, self.block_id)

    # computes the loss curves and puts them in kvs
    calculator.compute_risk(self.block_id)

    # Every asset on every grid cell of the block must now have both a
    # loss-ratio curve and a loss curve stored under its KVS keys.
    for point in block.grid(job_ctxt.region):
        assets = BaseRiskCalculator.assets_for_cell(
            self.job_id, point.site)
        for asset in assets:
            loss_ratio_key = kvs.tokens.loss_ratio_key(
                self.job_id, point.row, point.column, asset.asset_ref)
            self.assertTrue(kvs.get_client().get(loss_ratio_key))

            loss_key = kvs.tokens.loss_curve_key(
                self.job_id, point.row, point.column, asset.asset_ref)
            self.assertTrue(kvs.get_client().get(loss_key))
def prepare_job(params):
    """
    Create a new OqJob and fill in the related OpParams entry.

    Returns the newly created job object.

    :param dict params: config parameter name -> raw string value;
        must contain either 'SITES' or both 'REGION_VERTEX' and
        'REGION_GRID_SPACING', plus the mode-specific keys read below.
    :raises RuntimeError: if both or neither of sites/region are given
    """
    oqp = OqParams(upload=None)

    # fill in parameters
    # Geometry: exactly one of discrete sites or a region polygon.
    if 'SITES' in params:
        if 'REGION_VERTEX' in params and 'REGION_GRID_SPACING' in params:
            raise RuntimeError(
                "Job config contains both sites and region of interest.")

        ewkt = shapes.multipoint_ewkt_from_coords(params['SITES'])
        sites = GEOSGeometry(ewkt)
        oqp.sites = sites

    elif 'REGION_VERTEX' in params and 'REGION_GRID_SPACING' in params:
        oqp.region_grid_spacing = float(params['REGION_GRID_SPACING'])
        ewkt = shapes.polygon_ewkt_from_coords(params['REGION_VERTEX'])
        region = GEOSGeometry(ewkt)
        oqp.region = region

    else:
        raise RuntimeError(
            "Job config contains neither sites nor region of interest.")

    # TODO specify the owner as a command line parameter
    owner = OqUser.objects.get(user_name='openquake')

    job = OqJob(
        owner=owner, path=None,
        job_type=CALCULATION_MODE[params['CALCULATION_MODE']])

    oqp.job_type = job.job_type

    # fill-in parameters
    # Common parameters, translated via ENUM_MAP where the db uses
    # enum values rather than the raw config strings.
    oqp.component = ENUM_MAP[params['COMPONENT']]
    oqp.imt = ENUM_MAP[params['INTENSITY_MEASURE_TYPE']]
    oqp.truncation_type = ENUM_MAP[params['GMPE_TRUNCATION_TYPE']]
    oqp.truncation_level = float(params['TRUNCATION_LEVEL'])
    oqp.reference_vs30_value = float(params['REFERENCE_VS30_VALUE'])

    # Period/damping only apply to spectral acceleration.
    if oqp.imt == 'sa':
        oqp.period = float(params.get('PERIOD', 0.0))
        oqp.damping = float(params.get('DAMPING', 0.0))

    # Mode-specific parameters.  Note the delimiters differ: IMLs are
    # comma-separated, POES space-separated (as found in the config).
    if oqp.job_type == 'classical':
        oqp.imls = [float(v) for v in
                    params['INTENSITY_MEASURE_LEVELS'].split(",")]
        oqp.poes = [float(v) for v in
                    params['POES_HAZARD_MAPS'].split(" ")]

    if oqp.job_type in ('deterministic', 'event_based'):
        oqp.gm_correlated = (
            params['GROUND_MOTION_CORRELATION'].lower() != 'false')

    if oqp.job_type in ('classical', 'event_based'):
        oqp.investigation_time = float(
            params.get('INVESTIGATION_TIME', 0.0))
        oqp.min_magnitude = float(params.get('MINIMUM_MAGNITUDE', 0.0))
        oqp.realizations = int(params['NUMBER_OF_LOGIC_TREE_SAMPLES'])
    else:
        # Deterministic ('else' of classical/event_based above).
        oqp.gmf_calculation_number = int(
            params['NUMBER_OF_GROUND_MOTION_FIELDS_CALCULATIONS'])
        oqp.rupture_surface_discretization = float(
            params['RUPTURE_SURFACE_DISCRETIZATION'])

    if oqp.job_type == 'event_based':
        oqp.histories = int(params['NUMBER_OF_SEISMICITY_HISTORIES'])

    # Persist the params row first, then link it to the job.
    oqp.save()
    job.oq_params = oqp
    job.save()

    return job