def get_fault_surface(fault):
    """
    Simple and complex faults have different types of surfaces. The function
    builds the appropriate jpype java object for a given fault.

    :type fault:
        jpype java object of type `GEMFaultSourceData` or
        `GEMSubductionFaultSourceData` ('simple' or 'complex' faults,
        respectively)

    :returns:
        jpype java object of type `StirlingGriddedSurface` (for simple
        faults) or `ApproxEvenlyGriddedSurface` (for complex faults)
    """
    fault_type = fault.__javaclass__.getName()

    if fault_type == '%s.GEMFaultSourceData' % SRC_DATA_PKG:
        surface = java.jclass('StirlingGriddedSurface')(
            fault.getTrace(), fault.getDip(),
            fault.getSeismDepthUpp(), fault.getSeismDepthLow(),
            DEFAULT_GRID_SPACING)
    elif fault_type == '%s.GEMSubductionFaultSourceData' % SRC_DATA_PKG:
        surface = java.jclass('ApproxEvenlyGriddedSurface')(
            fault.getTopTrace(), fault.getBottomTrace(),
            DEFAULT_GRID_SPACING)
    else:
        raise ValueError("Unexpected fault type: %s" % fault_type)

    return surface

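# A minimal, pure-Python sketch of the dispatch-by-class-name technique used
# in get_fault_surface() above. The stub classes below are illustrative
# stand-ins for the jpype fault objects; only the dispatch pattern (branching
# on the runtime class name) mirrors the real function.

class SimpleFaultStub(object):
    """Stands in for GEMFaultSourceData."""

class ComplexFaultStub(object):
    """Stands in for GEMSubductionFaultSourceData."""

def surface_class_for(fault):
    # jpype exposes fault.__javaclass__.getName(); for plain Python objects
    # the closest analogue is type(fault).__name__.
    fault_type = type(fault).__name__
    if fault_type == 'SimpleFaultStub':
        return 'StirlingGriddedSurface'
    elif fault_type == 'ComplexFaultStub':
        return 'ApproxEvenlyGriddedSurface'
    raise ValueError("Unexpected fault type: %s" % fault_type)

assert surface_class_for(SimpleFaultStub()) == 'StirlingGriddedSurface'
assert surface_class_for(ComplexFaultStub()) == 'ApproxEvenlyGriddedSurface'
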
def test__serialize_gmf(self):
    # GMFs are serialized as expected.
    location1 = java.jclass("Location")(1.0, 2.0)
    location2 = java.jclass("Location")(1.1, 2.1)

    site1 = java.jclass("Site")(location1)
    site2 = java.jclass("Site")(location2)

    hashmap = java.jclass("HashMap")()
    hashmap.put(site1, 0.1)
    hashmap.put(site2, 0.2)

    self.job_ctxt.params[NUMBER_OF_CALC_KEY] = "2"
    self.job_ctxt.params["SAVE_GMFS"] = "true"
    self.job_ctxt.params["REGION_VERTEX"] = ("0.0, 0.0, 0.0, 3.0, "
                                             "3.0, 3.0, 3.0, 0.0")

    self.job_profile.region = GEOSGeometry(shapes.polygon_ewkt_from_coords(
        '0.0, 0.0, 0.0, 3.0, 3.0, 3.0, 3.0, 0.0'))
    self.job_profile.gmf_calculation_number = 2
    self.job_profile.save()

    calculator = scenario.ScenarioHazardCalculator(self.job_ctxt)

    with patch('openquake.calculators.hazard.scenario.core'
               '.ScenarioHazardCalculator'
               '.compute_ground_motion_field') as compute_gmf_mock:
        # the return value needs to be a Java HashMap
        compute_gmf_mock.return_value = hashmap
        calculator.execute()

    patht = os.path.join(self.job_ctxt.base_path,
                         self.job_ctxt['OUTPUT_DIR'], "gmf-%s.xml")
    for cnum in range(self.job_profile.gmf_calculation_number):
        path = patht % cnum
        self.assertTrue(
            os.path.isfile(path), "GMF file not found (%s)" % path)

def set_java_site_parameters(jsite, sm_data):
    """Given an OpenSHA `Site` object and a site model record, set the vs30,
    vs30 type, z1pt0, and z2pt5 parameters.

    :param jsite:
        A `org.opensha.commons.data.Site` jpype object.
    :param sm_data:
        :class:`openquake.db.models.SiteModel` instance.
    :returns:
        The ``jsite`` input object (so this function can be chained).
    """
    vs30_param = java.jclass("DoubleParameter")("Vs30")
    vs30_param.setValue(sm_data.vs30)

    vs30_type_param = java.jclass("StringParameter")("Vs30 Type")
    vs30_type_param.setValue(sm_data.vs30_type.capitalize())

    z1pt0_param = java.jclass("DoubleParameter")("Depth 1.0 km/sec")
    z1pt0_param.setValue(sm_data.z1pt0)

    z2pt5_param = java.jclass("DoubleParameter")("Depth 2.5 km/sec")
    z2pt5_param.setValue(sm_data.z2pt5)

    jsite.addParameter(vs30_param)
    jsite.addParameter(vs30_type_param)
    jsite.addParameter(z1pt0_param)
    jsite.addParameter(z2pt5_param)

    return jsite

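# Hedged usage sketch for set_java_site_parameters(): because the function
# returns the ``jsite`` it was given, it can be chained straight into a list
# add. The Location/Site wrappers are the jpype classes used throughout this
# corpus; ``sm_data`` is assumed to be an openquake.db.models.SiteModel row
# with vs30, vs30_type, z1pt0 and z2pt5 attributes, and ``jsite_list`` a
# java.jclass("ArrayList")(). Shown as comments since it needs a live JVM.
#
#     location = java.jclass("Location")(site.latitude, site.longitude)
#     jsite = java.jclass("Site")(location)
#     jsite_list.add(set_java_site_parameters(jsite, sm_data))
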
def compute_uhs(the_job, site):
    """Given a `JobContext` and a site of interest, compute UHS. The Java
    `UHSCalculator` is called to perform the core computation.

    :param the_job:
        :class:`openquake.engine.JobContext` instance.
    :param site:
        :class:`openquake.shapes.Site` instance.
    :returns:
        An `ArrayList` (Java object) of `UHSResult` objects, one per PoE.
    """
    periods = list_to_jdouble_array(the_job["UHS_PERIODS"])
    poes = list_to_jdouble_array(the_job["POES"])
    imls = get_iml_list(the_job["INTENSITY_MEASURE_LEVELS"],
                        the_job["INTENSITY_MEASURE_TYPE"])
    max_distance = the_job["MAXIMUM_DISTANCE"]

    cache = java.jclass("KVS")(config.get("kvs", "host"),
                               int(config.get("kvs", "port")))

    erf = generate_erf(the_job.job_id, cache)
    gmpe_map = generate_gmpe_map(the_job.job_id, cache)
    set_gmpe_params(gmpe_map, the_job.params)

    uhs_calc = java.jclass("UHSCalculator")(periods, poes, imls, erf,
                                            gmpe_map, max_distance)

    uhs_results = uhs_calc.computeUHS(
        site.latitude,
        site.longitude,
        the_job["VS30_TYPE"],
        the_job["REFERENCE_VS30_VALUE"],
        the_job["DEPTHTO1PT0KMPERSEC"],
        the_job["REFERENCE_DEPTH_TO_2PT5KM_PER_SEC_PARAM"],
    )

    return uhs_results

def test__serialize_gmf(self):
    # GMFs are serialized as expected.
    location1 = java.jclass("Location")(1.0, 2.0)
    location2 = java.jclass("Location")(1.1, 2.1)

    site1 = java.jclass("Site")(location1)
    site2 = java.jclass("Site")(location2)

    hashmap = java.jclass("HashMap")()
    hashmap.put(site1, 0.1)
    hashmap.put(site2, 0.2)

    self.job_ctxt.params[NUMBER_OF_CALC_KEY] = "2"
    self.job_ctxt.params["SAVE_GMFS"] = "true"
    self.job_ctxt.params["REGION_VERTEX"] = ("0.0, 0.0, 0.0, 3.0, "
                                             "3.0, 3.0, 3.0, 0.0")

    self.job_profile.region = GEOSGeometry(
        shapes.polygon_ewkt_from_coords(
            '0.0, 0.0, 0.0, 3.0, 3.0, 3.0, 3.0, 0.0'))
    self.job_profile.gmf_calculation_number = 2
    self.job_profile.save()

    calculator = scenario.ScenarioHazardCalculator(self.job_ctxt)

    with patch('openquake.calculators.hazard.scenario.core'
               '.ScenarioHazardCalculator'
               '.compute_ground_motion_field') as compute_gmf_mock:
        # the return value needs to be a Java HashMap
        compute_gmf_mock.return_value = hashmap
        calculator.execute()

    patht = os.path.join(self.job_ctxt.base_path,
                         self.job_ctxt['OUTPUT_DIR'], "gmf-%s.xml")
    for cnum in range(self.job_profile.gmf_calculation_number):
        path = patht % cnum
        self.assertTrue(os.path.isfile(path),
                        "GMF file not found (%s)" % path)

def gmpe(self):
    """Load the ground motion prediction equation specified in the
    configuration file. The key used in the configuration file is
    GMPE_MODEL_NAME.

    :returns: jpype wrapper around an instance of the ground motion
        prediction equation.
    """
    deserializer = java.jclass("GMPEDeserializer")()

    package_name = "org.opensha.sha.imr.attenRelImpl"
    class_name = self.params["GMPE_MODEL_NAME"]
    fqn = package_name + "." + class_name

    gmpe = deserializer.deserialize(
        java.jclass("JsonPrimitive")(fqn), None, None)

    tree_data = java.jclass("GmpeLogicTreeData")()

    tree_data.setGmpeParams(
        self.params["COMPONENT"],
        self.params["INTENSITY_MEASURE_TYPE"],
        jpype.JDouble(float(self.params["PERIOD"])),
        jpype.JDouble(float(self.params["DAMPING"])),
        self.params["GMPE_TRUNCATION_TYPE"],
        jpype.JDouble(float(self.params["TRUNCATION_LEVEL"])),
        "Total",
        jpype.JDouble(float(self.params["REFERENCE_VS30_VALUE"])),
        jpype.JObject(gmpe, java.jclass("AttenuationRelationship")))

    return gmpe

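# Illustration of the fully-qualified class name assembled above. Assuming
# GMPE_MODEL_NAME were set to "BA_2008_AttenRel" (the choice of model here is
# only an example), the deserializer would be handed the string:
#
#     "org.opensha.sha.imr.attenRelImpl.BA_2008_AttenRel"
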
def preloader(self, *args, **kwargs):
    """Validate job"""
    self.cache = java.jclass("KVS")(
        settings.KVS_HOST, settings.KVS_PORT)
    self.calc = java.jclass("LogicTreeProcessor")(
        self.cache, self.key)
    return fn(self, *args, **kwargs)

def generate_erf(self):
    """Generate the Earthquake Rupture Forecast from the currently stored
    source model logic tree."""
    key = kvs.generate_product_key(self.id, kvs.tokens.SOURCE_MODEL_TOKEN)
    sources = java.jclass("JsonSerializer").getSourceListFromCache(
        self.cache, key)
    erf = java.jclass("GEM1ERF")(sources)
    self.calc.setGEM1ERFParams(erf)
    return erf

def test_when_measure_type_is_not_mmi_exp_is_stored(self):
    location = java.jclass("Location")(1.0, 2.0)
    site = java.jclass("Site")(location)

    hashmap = java.jclass("HashMap")()
    hashmap.put(site, 0.1)

    for gmv in det.gmf_to_dict(hashmap, "PGA"):
        self.assertEqual(math.exp(0.1), gmv["mag"])

def test_when_measure_type_is_mmi_we_store_as_is(self):
    location = java.jclass("Location")(1.0, 2.0)
    site = java.jclass("Site")(location)

    hashmap = java.jclass("HashMap")()
    hashmap.put(site, 0.1)

    for gmv in det.gmf_to_dict(hashmap, "MMI"):
        self.assertEqual(0.1, gmv["mag"])

def test_when_measure_type_is_mmi_we_store_as_is(self):
    location = java.jclass("Location")(1.0, 2.0)
    site = java.jclass("Site")(location)

    hashmap = java.jclass("HashMap")()
    hashmap.put(site, 0.1)

    for gmv in scenario.gmf_to_dict(hashmap, "MMI"):
        self.assertEqual(0.1, gmv["mag"])

def test_when_measure_type_is_not_mmi_exp_is_stored(self):
    location = java.jclass("Location")(1.0, 2.0)
    site = java.jclass("Site")(location)

    hashmap = java.jclass("HashMap")()
    hashmap.put(site, 0.1)

    for gmv in scenario.gmf_to_dict(hashmap, "PGA"):
        self.assertEqual(math.exp(0.1), gmv["mag"])

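# A pure-Python sketch of the behaviour the four tests above pin down for
# gmf_to_dict(): ground motion values are exponentiated for every IMT except
# "MMI", and each yielded dict carries the value under "mag". The site
# coordinate keys ("site_lon"/"site_lat") are the ones execute() reads
# further down; the plain dict standing in for the Java HashMap is an
# assumption for illustration, not the real implementation.

import math

def gmf_to_dict_sketch(gmf, imt):
    for (lon, lat), value in gmf.items():
        if imt != "MMI":
            value = math.exp(value)
        yield {"site_lon": lon, "site_lat": lat, "mag": value}

assert [gmv["mag"] for gmv in gmf_to_dict_sketch({(2.0, 1.0): 0.1}, "MMI")] \
    == [0.1]
assert [gmv["mag"] for gmv in gmf_to_dict_sketch({(2.0, 1.0): 0.1}, "PGA")] \
    == [math.exp(0.1)]
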
def generate_erf(self):
    """Generate the Earthquake Rupture Forecast from the currently stored
    source model logic tree."""
    key = kvs.generate_product_key(self.id, kvs.tokens.SOURCE_MODEL_TOKEN)
    sources = java.jclass("JsonSerializer").getSourceListFromCache(
        self.cache, key)
    erf = java.jclass("GEM1ERF")(sources)
    self.calc.setGEM1ERFParams(erf)
    return erf

def compute_disagg_matrix(job_id, site, poe, result_dir):
    """
    Compute a complete 5D Disaggregation matrix. This task leans heavily on
    the DisaggregationCalculator (in the OpenQuake Java lib) to handle this
    computation.

    The 5D matrix returned from the Java calculator will be saved to a file
    in HDF5 format.

    :param job_id: id of the job record in the KVS
    :type job_id: `str`
    :param site: a single site of interest
    :type site: :class:`openquake.shapes.Site` instance
    :param poe: Probability of Exceedance
    :type poe: `float`
    :param result_dir: location for the Java code to write the matrix in an
        HDF5 file (in a distributed environment, this should be the path of a
        mounted NFS)

    :returns: 2-tuple of (ground_motion_value, path_to_h5_matrix_file)
    """
    the_job = job.Job.from_kvs(job_id)

    lat_bin_lims = the_job[job_cfg.LAT_BIN_LIMITS]
    lon_bin_lims = the_job[job_cfg.LON_BIN_LIMITS]
    mag_bin_lims = the_job[job_cfg.MAG_BIN_LIMITS]
    eps_bin_lims = the_job[job_cfg.EPS_BIN_LIMITS]

    jd = list_to_jdouble_array

    disagg_calc = java.jclass('DisaggregationCalculator')(
        jd(lat_bin_lims), jd(lon_bin_lims),
        jd(mag_bin_lims), jd(eps_bin_lims))

    cache = java.jclass('KVS')(
        config.get('kvs', 'host'),
        int(config.get('kvs', 'port')))

    erf = generate_erf(job_id, cache)
    gmpe_map = generate_gmpe_map(job_id, cache)
    set_gmpe_params(gmpe_map, the_job.params)

    imls = get_iml_list(the_job['INTENSITY_MEASURE_LEVELS'],
                        the_job['INTENSITY_MEASURE_TYPE'])
    vs30_type = the_job['VS30_TYPE']
    vs30_value = the_job['REFERENCE_VS30_VALUE']
    depth_to_1pt0 = the_job['DEPTHTO1PT0KMPERSEC']
    depth_to_2pt5 = the_job['REFERENCE_DEPTH_TO_2PT5KM_PER_SEC_PARAM']

    matrix_result = disagg_calc.computeMatrix(
        site.latitude, site.longitude, erf, gmpe_map, poe, imls,
        vs30_type, vs30_value, depth_to_1pt0, depth_to_2pt5)

    matrix_path = save_5d_matrix_to_h5(
        result_dir, numpy.array(matrix_result.getMatrix()))

    return (matrix_result.getGMV(), matrix_path)

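# Hedged usage sketch for compute_disagg_matrix(). All argument values below
# are made up for illustration; the real inputs come from the KVS-backed job
# record, as documented above. Needs a live JVM and a populated KVS, so it is
# shown as comments only.
#
#     gmv, h5_path = compute_disagg_matrix(
#         job_id='42', site=shapes.Site(2.0, 1.0), poe=0.1,
#         result_dir='/path/to/mounted/nfs')
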
def preloader(self, *args, **kwargs):
    """Validate job"""
    self.cache = java.jclass("KVS")(
        config.get("kvs", "host"), int(config.get("kvs", "port")))
    self.calc = java.jclass("LogicTreeProcessor")(
        self.cache, self.key)
    java.jvm().java.lang.System.setProperty("openquake.nrml.schema",
                                            xml.nrml_schema_file())
    return fn(self, *args, **kwargs)

def preloader(self, *args, **kwargs):
    """Validate job"""
    self.cache = java.jclass("KVS")(
        settings.KVS_HOST, settings.KVS_PORT)
    self.calc = java.jclass("LogicTreeProcessor")(
        self.cache, self.key)
    java.jvm().java.lang.System.setProperty("openquake.nrml.schema",
                                            xml.nrml_schema_file())
    return fn(self, *args, **kwargs)

def decorated(self, *args, **kwargs):  # pylint: disable=C0111
    kvs_data = (config.get("kvs", "host"),
                int(config.get("kvs", "port")))

    if kvs.cache_connections():
        key = hashlib.md5(repr(kvs_data)).hexdigest()
        if key not in __KVS_CONN_CACHE:
            __KVS_CONN_CACHE[key] = java.jclass("KVS")(*kvs_data)
        self.cache = __KVS_CONN_CACHE[key]
    else:
        self.cache = java.jclass("KVS")(*kvs_data)

    return fn(self, *args, **kwargs)

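# A self-contained sketch of the connection-caching idea in the decorator
# above: clients are memoised in a module-level dict keyed by the md5 of the
# repr of the (host, port) tuple, so repeated calls reuse one connection.
# FakeClient stands in for the java.jclass("KVS") wrapper; everything named
# here is illustrative.

import hashlib

_CONN_CACHE = {}

class FakeClient(object):
    def __init__(self, host, port):
        self.host, self.port = host, port

def get_connection(host, port, cache_enabled=True):
    kvs_data = (host, port)
    if not cache_enabled:
        return FakeClient(*kvs_data)
    key = hashlib.md5(repr(kvs_data)).hexdigest()
    if key not in _CONN_CACHE:
        _CONN_CACHE[key] = FakeClient(*kvs_data)
    return _CONN_CACHE[key]

# With caching on, the same client object comes back every time.
assert get_connection("localhost", 6379) is get_connection("localhost", 6379)
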
def compute_ground_motion_field(self, _random_generator):
    """Stubbed version of the method that computes the ground motion
    field calling java stuff."""
    hashmap = java.jclass("HashMap")()
    for site in self.sites_to_compute():
        location = java.jclass("Location")(site.latitude, site.longitude)
        site = java.jclass("Site")(location)
        hashmap.put(site, 0.5)
    return hashmap

def compute_ground_motion_field(self, _random_generator):
    """Stubbed version of the method that computes the ground motion
    field calling java stuff."""
    hashmap = java.jclass("HashMap")()
    for site in self.job_ctxt.sites_to_compute():
        location = java.jclass("Location")(site.latitude, site.longitude)
        site = java.jclass("Site")(location)
        hashmap.put(site, 0.5)
    return hashmap

def compute_uhs(the_job, site):
    """Given a `JobContext` and a site of interest, compute UHS. The Java
    `UHSCalculator` is called to perform the core computation.

    :param the_job:
        :class:`openquake.engine.JobContext` instance.
    :param site:
        :class:`openquake.shapes.Site` instance.
    :returns:
        An `ArrayList` (Java object) of `UHSResult` objects, one per PoE.
    """
    periods = list_to_jdouble_array(the_job['UHS_PERIODS'])
    poes = list_to_jdouble_array(the_job['POES'])
    imls = general.get_iml_list(the_job['INTENSITY_MEASURE_LEVELS'],
                                the_job['INTENSITY_MEASURE_TYPE'])
    max_distance = the_job['MAXIMUM_DISTANCE']

    cache = java.jclass('KVS')(
        config.get('kvs', 'host'), int(config.get('kvs', 'port')))

    erf = general.generate_erf(the_job.job_id, cache)
    gmpe_map = general.generate_gmpe_map(the_job.job_id, cache)
    general.set_gmpe_params(gmpe_map, the_job.params)

    uhs_calc = java.jclass('UHSCalculator')(periods, poes, imls, erf,
                                            gmpe_map, max_distance)

    site_model = general.get_site_model(the_job.oq_job.id)

    if site_model is not None:
        sm_data = general.get_closest_site_model_data(site_model, site)
        vs30_type = sm_data.vs30_type.capitalize()
        vs30 = sm_data.vs30
        z1pt0 = sm_data.z1pt0
        z2pt5 = sm_data.z2pt5
    else:
        jp = the_job.oq_job_profile
        vs30_type = jp.vs30_type.capitalize()
        vs30 = jp.reference_vs30_value
        z1pt0 = jp.depth_to_1pt_0km_per_sec
        z2pt5 = jp.reference_depth_to_2pt5km_per_sec_param

    uhs_results = _compute_uhs(
        uhs_calc, site.latitude, site.longitude, vs30_type, vs30, z1pt0,
        z2pt5)

    return uhs_results

def compute_ground_motion_fields(self, site_list, history, realization,
                                 seed):
    """Ground motion field calculation, runs on the workers."""
    jpype = java.jvm()

    jsite_list = self.parameterize_sites(site_list)
    key = kvs.tokens.stochastic_set_key(self.job_ctxt.job_id, history,
                                        realization)
    correlate = self.job_ctxt['GROUND_MOTION_CORRELATION']
    stochastic_set_id = "%s!%s" % (history, realization)
    java.jclass("HazardCalculator").generateAndSaveGMFs(
        self.cache, key, stochastic_set_id, jsite_list,
        self.generate_erf(),
        self.generate_gmpe_map(),
        java.jclass("Random")(seed),
        jpype.JBoolean(correlate))

def compute_ground_motion_fields(self, site_list, stochastic_set_id, seed):
    """Ground motion field calculation, runs on the workers."""
    jpype = java.jvm()

    jsite_list = self.parameterize_sites(site_list)
    key = kvs.generate_product_key(self.id,
                                   kvs.tokens.STOCHASTIC_SET_TOKEN,
                                   stochastic_set_id)
    # GROUND_MOTION_CORRELATION is stored as the string "true"/"false"
    gmc = self.params['GROUND_MOTION_CORRELATION']
    correlate = (gmc == "true")
    java.jclass("HazardCalculator").generateAndSaveGMFs(
        self.cache, key, stochastic_set_id, jsite_list,
        self.generate_erf(),
        self.generate_gmpe_map(),
        java.jclass("Random")(seed),
        jpype.JBoolean(correlate))

def compute_ground_motion_fields(self, site_list, stochastic_set_id, seed):
    """Ground motion field calculation, runs on the workers."""
    jpype = java.jvm()

    jsite_list = self.parameterize_sites(site_list)
    key = kvs.generate_product_key(
        self.id, kvs.tokens.STOCHASTIC_SET_TOKEN, stochastic_set_id)
    # GROUND_MOTION_CORRELATION is stored as the string "true"/"false"
    gmc = self.params['GROUND_MOTION_CORRELATION']
    correlate = (gmc == "true")
    java.jclass("HazardCalculator").generateAndSaveGMFs(
        self.cache, key, stochastic_set_id, jsite_list,
        self.generate_erf(),
        self.generate_gmpe_map(),
        java.jclass("Random")(seed),
        jpype.JBoolean(correlate))

def generate_gmpe_map(self):
    """Generate the GMPE map from the stored GMPE logic tree."""
    key = kvs.generate_product_key(self.id, kvs.tokens.GMPE_TOKEN)
    gmpe_map = java.jclass("JsonSerializer").getGmpeMapFromCache(
        self.cache, key)
    self.set_gmpe_params(gmpe_map)
    return gmpe_map

def __init__(self, src_model_path, mfd_bin_width=DEFAULT_MFD_BIN_WIDTH,
             owner_id=1, input_id=None):
    """
    :param src_model_path: path to a source model file
    :type src_model_path: str

    :param mfd_bin_width: Magnitude Frequency Distribution bin width
    :type mfd_bin_width: float

    :param owner_id: ID of an admin.organization entity in the database. By
        default, the default 'GEM Foundation' group will be used.
        Note(LB): This is kind of ugly and needs to be revisited later.

    :param int input_id: The database key of the uploaded input file from
        which this source was extracted. Please note that the `input_id` will
        only be supplied when uploading source model files via the GUI.
    """
    self.src_model_path = src_model_path
    self.mfd_bin_width = mfd_bin_width
    self.owner_id = owner_id
    self.input_id = input_id

    # Java SourceModelReader object
    self.src_reader = java.jclass('SourceModelReader')(
        self.src_model_path, self.mfd_bin_width)

def __init__(self, src_model_path, mfd_bin_width=DEFAULT_MFD_BIN_WIDTH,
             owner_id=1, input_id=None):
    """
    :param src_model_path: path to a source model file
    :type src_model_path: str

    :param mfd_bin_width: Magnitude Frequency Distribution bin width
    :type mfd_bin_width: float

    :param owner_id: ID of an admin.organization entity in the database. By
        default, the default 'GEM Foundation' group will be used.
        Note(LB): This is kind of ugly and needs to be revisited later.

    :param int input_id: The database key of the uploaded input file from
        which this source was extracted. Please note that the `input_id` will
        only be supplied when uploading source model files via the GUI.
    """
    self.src_model_path = src_model_path
    self.mfd_bin_width = mfd_bin_width
    self.owner_id = owner_id
    self.input_id = input_id

    # Java SourceModelReader object
    self.src_reader = java.jclass('SourceModelReader')(
        self.src_model_path, self.mfd_bin_width)

def build_simple_fault_insert(fault):
    """
    Build an :class:`openquake.db.models.SimpleFault` Django model. See the
    documentation for :py:function:`parse_simple_fault_src` for more
    information.
    """
    simple_fault = models.SimpleFault()
    simple_fault.name = fault.getName()
    simple_fault.gid = fault.getID()
    simple_fault.dip = fault.getDip()
    simple_fault.upper_depth = fault.getSeismDepthUpp()
    simple_fault.lower_depth = fault.getSeismDepthLow()

    # coords are ordered as lon/lat/depth
    point_str_3d = lambda pt: \
        ' '.join([str(pt.getLongitude()),
                  str(pt.getLatitude()),
                  str(pt.getDepth())])

    coord_list = lambda point_list: \
        ', '.join([point_str_3d(point) for point in point_list])

    trace = fault.getTrace()
    trace_coords = coord_list(trace)

    simple_fault.edge = 'SRID=4326;LINESTRING(%s)' % trace_coords

    surface = get_fault_surface(fault)
    location_list = surface.getSurfacePerimeterLocsList()

    formatter = java.jclass("LocationListFormatter")(location_list)

    outline_coords = formatter.format()

    simple_fault.outline = 'SRID=4326;POLYGON((%s))' % outline_coords

    return simple_fault

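# A runnable sketch of the EWKT strings built above, with a plain Python
# point class standing in for the jpype Location objects (an assumption for
# illustration; only the formatting lambdas mirror the real code).

class _Point(object):
    def __init__(self, lon, lat, depth):
        self._lon, self._lat, self._depth = lon, lat, depth

    def getLongitude(self):
        return self._lon

    def getLatitude(self):
        return self._lat

    def getDepth(self):
        return self._depth

point_str_3d = lambda pt: ' '.join(
    [str(pt.getLongitude()), str(pt.getLatitude()), str(pt.getDepth())])
coord_list = lambda pts: ', '.join([point_str_3d(pt) for pt in pts])

edge = 'SRID=4326;LINESTRING(%s)' % coord_list(
    [_Point(1.0, 2.0, 0.0), _Point(1.1, 2.1, 0.0)])
assert edge == 'SRID=4326;LINESTRING(1.0 2.0 0.0, 1.1 2.1 0.0)'
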
def execute(self):
    """Entry point to trigger the computation."""
    random_generator = java.jclass("Random")(int(
        self.job_ctxt.params["GMF_RANDOM_SEED"]))

    encoder = json.JSONEncoder()
    kvs_client = kvs.get_client()

    num_calculations = self._number_of_calculations()
    self.initialize_pr_data(num_calculations=num_calculations)

    for cnum in xrange(num_calculations):
        try:
            gmf = self.compute_ground_motion_field(random_generator)
            stats.pk_inc(self.job_ctxt.job_id, "nhzrd_done", 1)
        except:
            # Count failure
            stats.pk_inc(self.job_ctxt.job_id, "nhzrd_failed", 1)
            raise
        logs.log_percent_complete(self.job_ctxt.job_id, "hazard")
        imt = self.job_ctxt.params["INTENSITY_MEASURE_TYPE"]
        self._serialize_gmf(gmf, imt, cnum)

        for gmv in gmf_to_dict(gmf, imt):
            site = shapes.Site(gmv["site_lon"], gmv["site_lat"])

            key = kvs.tokens.ground_motion_values_key(
                self.job_ctxt.job_id, site)
            kvs_client.rpush(key, encoder.encode(gmv))

def execute(self):
    """Entry point to trigger the computation."""
    random_generator = java.jclass(
        "Random")(int(self.job_ctxt.params["GMF_RANDOM_SEED"]))

    encoder = json.JSONEncoder()
    kvs_client = kvs.get_client()

    num_calculations = self._number_of_calculations()
    self.initialize_pr_data(num_calculations=num_calculations)

    for cnum in xrange(num_calculations):
        try:
            gmf = self.compute_ground_motion_field(random_generator)
            stats.pk_inc(self.job_ctxt.job_id, "nhzrd_done", 1)
        except:
            # Count failure
            stats.pk_inc(self.job_ctxt.job_id, "nhzrd_failed", 1)
            raise
        logs.log_percent_complete(self.job_ctxt.job_id, "hazard")
        imt = self.job_ctxt.params["INTENSITY_MEASURE_TYPE"]
        self._serialize_gmf(gmf, imt, cnum)

        for gmv in gmf_to_dict(gmf, imt):
            site = shapes.Site(gmv["site_lon"], gmv["site_lat"])

            key = kvs.tokens.ground_motion_values_key(
                self.job_ctxt.job_id, site)
            kvs_client.rpush(key, encoder.encode(gmv))

def execute(self):
    """Entry point to trigger the computation."""
    random_generator = java.jclass(
        "Random")(int(self.params["GMF_RANDOM_SEED"]))

    encoder = json.JSONEncoder()
    kvs_client = kvs.get_client(binary=False)

    grid = self.region.grid

    for _ in xrange(self._number_of_calculations()):
        gmf = self.compute_ground_motion_field(random_generator)

        for gmv in gmf_to_dict(
                gmf, self.params["INTENSITY_MEASURE_TYPE"]):

            site = shapes.Site(gmv["site_lon"], gmv["site_lat"])
            point = grid.point_at(site)

            key = kvs.tokens.ground_motion_values_key(
                self.job_id, point)
            kvs_client.rpush(key, encoder.encode(gmv))

    return [True]

def compute_hazard_curve(self, site_list, realization):
    """ Compute hazard curves, write them to KVS as JSON,
    and return a list of the KVS keys for each curve. """
    jsite_list = self.parameterize_sites(site_list)
    hazard_curves = java.jclass("HazardCalculator").getHazardCurvesAsJson(
        jsite_list,
        self.generate_erf(),
        self.generate_gmpe_map(),
        self.get_iml_list(),
        float(self.params['MAXIMUM_DISTANCE']))

    # write the curves to the KVS and return a list of the keys
    kvs_client = kvs.get_client()
    curve_keys = []
    for i in xrange(0, len(hazard_curves)):
        curve = hazard_curves[i]
        site = site_list[i]
        lon = site.longitude
        lat = site.latitude
        curve_key = kvs.tokens.hazard_curve_key(
            self.id, realization, lon, lat)
        kvs_client.set(curve_key, curve)
        curve_keys.append(curve_key)
    return curve_keys

def generate_gmpe_map(self):
    """Generate the GMPE map from the stored GMPE logic tree."""
    key = kvs.generate_product_key(self.id, kvs.tokens.GMPE_TOKEN)
    gmpe_map = java.jclass(
        "JsonSerializer").getGmpeMapFromCache(self.cache, key)
    self.set_gmpe_params(gmpe_map)
    return gmpe_map

def __init__(self, src_model_path, engine,
             mfd_bin_width=DEFAULT_MFD_BIN_WIDTH, owner_id=1,
             input_id=None):
    """
    :param src_model_path: path to a source model file
    :type src_model_path: str

    :param engine: db engine to provide connectivity and reflection
    :type engine: :py:class:`sqlalchemy.engine.base.Engine`

    :param mfd_bin_width: Magnitude Frequency Distribution bin width
    :type mfd_bin_width: float

    :param owner_id: ID of an admin.organization entity in the database. By
        default, the default 'GEM Foundation' group will be used.
        Note(LB): This is kind of ugly and needs to be revisited later.

    :param int input_id: The database key of the uploaded input file from
        which this source was extracted. Please note that the `input_id` will
        only be supplied when uploading source model files via the GUI.
    """
    self.src_model_path = src_model_path
    self.engine = engine
    self.mfd_bin_width = mfd_bin_width
    self.owner_id = owner_id
    self.input_id = input_id

    # Java SourceModelReader object
    self.src_reader = java.jclass('SourceModelReader')(
        self.src_model_path, self.mfd_bin_width)

    self.meta = sqlalchemy.MetaData(engine)
    self.meta.reflect(schema=db.PSHAI_TS)

def set_gmpe_params(gmpe_map, params):
    """Push parameters from the config file into the GMPE objects.

    :param gmpe_map: jpype instance of
        `HashMap<TectonicRegionType, ScalarIntensityMeasureRelationshipAPI>`
    :param dict params: job config params
    """
    jpype = java.jvm()

    jd_float = lambda x: jpype.JDouble(float(x))

    component = params.get("COMPONENT")
    imt = params.get("INTENSITY_MEASURE_TYPE")
    # PERIOD is not used in UHS calculations.
    period = (jd_float(params.get("PERIOD"))
              if params.get("PERIOD") is not None else None)
    damping = jd_float(params.get("DAMPING"))
    gmpe_trunc_type = params.get("GMPE_TRUNCATION_TYPE")
    trunc_level = jd_float(params.get("TRUNCATION_LEVEL"))
    stddev_type = params.get("STANDARD_DEVIATION_TYPE")

    j_set_gmpe_params = java.jclass("GmpeLogicTreeData").setGmpeParams

    for tect_region in gmpe_map.keySet():
        gmpe = gmpe_map.get(tect_region)

        # There are two overloads for this method; one with 'period'...
        if period is not None:
            j_set_gmpe_params(
                component, imt, period, damping,
                gmpe_trunc_type, trunc_level, stddev_type,
                jpype.JObject(gmpe, java.jclass("AttenuationRelationship")),
            )
        # ... and one without.
        else:
            j_set_gmpe_params(
                component, imt, damping,
                gmpe_trunc_type, trunc_level, stddev_type,
                jpype.JObject(gmpe, java.jclass("AttenuationRelationship")),
            )
        gmpe_map.put(tect_region, gmpe)

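# Hedged example of the `params` dict consumed by set_gmpe_params(): the keys
# are exactly the ones the function reads; the values are plausible
# illustrations only, not taken from a real job config.
EXAMPLE_GMPE_PARAMS = {
    "COMPONENT": "Average Horizontal (GMRotI50)",
    "INTENSITY_MEASURE_TYPE": "PGA",
    "PERIOD": "0.0",  # omit (None) for UHS calculations, per the comment above
    "DAMPING": "5.0",
    "GMPE_TRUNCATION_TYPE": "2 Sided",
    "TRUNCATION_LEVEL": "3",
    "STANDARD_DEVIATION_TYPE": "Total",
}
# set_gmpe_params(gmpe_map, EXAMPLE_GMPE_PARAMS)  # needs a live JVM + gmpe_map
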
def test__prepare_gmf_serialization_with_mmi(self):
    # In case of imt == mmi the GMF values are left unchanged
    location1 = java.jclass("Location")(1.0, 2.0)
    location2 = java.jclass("Location")(1.1, 2.1)

    site1 = java.jclass("Site")(location1)
    site2 = java.jclass("Site")(location2)

    hashmap = java.jclass("HashMap")()
    hashmap.put(site1, 0.1)
    hashmap.put(site2, 0.2)

    expected = {shapes.Site(2.0, 1.0): {"groundMotion": 0.1},
                shapes.Site(2.1, 1.1): {"groundMotion": 0.2}}
    actual = scenario._prepare_gmf_serialization(hashmap, "MMI")
    self.assertEqual(expected, actual)

def get_iml_list(self):
    """Build the appropriate Arbitrary Discretized Func from the IMLs,
    based on the IMT"""
    iml_list = java.jclass("ArrayList")()
    for val in self.params["INTENSITY_MEASURE_LEVELS"].split(","):
        iml_list.add(
            IML_SCALING[self.params["INTENSITY_MEASURE_TYPE"]](float(val)))
    return iml_list

def test__prepare_gmf_serialization_with_imt_other_than_mmi(self):
    # In case of imt != mmi the GMF values are transformed as needed.
    location1 = java.jclass("Location")(1.0, 2.0)
    location2 = java.jclass("Location")(1.1, 2.1)

    site1 = java.jclass("Site")(location1)
    site2 = java.jclass("Site")(location2)

    hashmap = java.jclass("HashMap")()
    hashmap.put(site1, 0.1)
    hashmap.put(site2, 0.2)

    expected = {shapes.Site(2.0, 1.0): {"groundMotion": math.exp(0.1)},
                shapes.Site(2.1, 1.1): {"groundMotion": math.exp(0.2)}}
    actual = scenario._prepare_gmf_serialization(hashmap, "PGA")
    self.assertEqual(expected, actual)

def get_iml_list(self):
    """Build the appropriate Arbitrary Discretized Func from the IMLs,
    based on the IMT"""
    iml_list = java.jclass("ArrayList")()
    for val in self.params['INTENSITY_MEASURE_LEVELS'].split(","):
        iml_list.add(IML_SCALING[self.params['INTENSITY_MEASURE_TYPE']](
            float(val)))
    return iml_list

def __init__(self, *args, **kwargs):
    """
    One-time setup stuff for this entire test case class.
    """
    super(NrmlModelLoaderTestCase, self).__init__(*args, **kwargs)

    self.src_reader = java.jclass('SourceModelReader')(
        TEST_SRC_FILE, db_loader.SourceModelLoader.DEFAULT_MFD_BIN_WIDTH)
    self.sources = self.src_reader.read()
    self.simple, self.complex, self.area, self.point = self.sources

def get_iml_list(self):
    """Build the appropriate Arbitrary Discretized Func from the IMLs,
    based on the IMT"""
    iml_list = java.jclass("ArrayList")()
    for val in self.imls:
        iml_list.add(
            IML_SCALING[self.params['INTENSITY_MEASURE_TYPE']](val))
    return iml_list

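# A sketch of the IML_SCALING mapping assumed by the get_iml_list() variants
# above: each IMT maps to a callable applied to every IML before it is added
# to the Java ArrayList. The real mapping lives elsewhere in the codebase;
# log-scaling for PGA and identity for MMI is an assumption here, consistent
# with the exp()-based gmf_to_dict tests earlier in this section.

import math

IML_SCALING_SKETCH = {
    'PGA': math.log,         # assumed: hazard curves work with log-IMLs
    'MMI': lambda iml: iml,  # assumed: MMI values are used as-is
}

assert IML_SCALING_SKETCH['MMI'](0.1) == 0.1
assert IML_SCALING_SKETCH['PGA'](1.0) == 0.0
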
def __init__(self, *args, **kwargs):
    """
    One-time setup stuff for this entire test case class.
    """
    super(NrmlModelLoaderTestCase, self).__init__(*args, **kwargs)

    self.src_reader = java.jclass('SourceModelReader')(
        TEST_SRC_FILE, db_loader.SourceModelLoader.DEFAULT_MFD_BIN_WIDTH)
    self.sources = self.src_reader.read()
    # the last source in the file is also a simple fault, just with a
    # different MFD; skip it
    self.simple, self.complex, self.area, self.point, _ = self.sources

def generate_erf(job_id, cache):
    """
    Generate the Earthquake Rupture Forecast from the source model data
    stored in the KVS.

    :param int job_id: id of the job
    :param cache: jpype instance of `org.gem.engine.hazard.redis.Cache`
    :returns: jpype instance of
        `org.opensha.sha.earthquake.rupForecastImpl.GEM1.GEM1ERF`
    """
    src_key = kvs.tokens.source_model_key(job_id)
    job_key = kvs.tokens.generate_job_key(job_id)

    sources = java.jclass("JsonSerializer").getSourceListFromCache(
        cache, src_key)

    erf = java.jclass("GEM1ERF")(sources)

    calc = java.jclass("LogicTreeProcessor")(cache, job_key)
    calc.setGEM1ERFParams(erf)

    return erf

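# Usage sketch mirroring the call sequence in compute_uhs() and
# compute_disagg_matrix() above: build the KVS-backed cache, then the ERF and
# GMPE map for the job, and push the job parameters into the GMPEs. Needs a
# live JVM and a populated KVS, so it is shown as comments only.
#
#     cache = java.jclass("KVS")(config.get("kvs", "host"),
#                                int(config.get("kvs", "port")))
#     erf = generate_erf(the_job.job_id, cache)
#     gmpe_map = generate_gmpe_map(the_job.job_id, cache)
#     set_gmpe_params(gmpe_map, the_job.params)
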
def parameterize_sites(self, site_list):
    """Convert python Sites to Java Sites, and add default parameters."""
    # TODO(JMC): There's Java code for this already, sets each site to have
    # the same default parameters
    jpype = java.jvm()
    jsite_list = java.jclass("ArrayList")()
    for x in site_list:
        site = x.to_java()

        vs30 = java.jclass("DoubleParameter")(jpype.JString("Vs30"))
        vs30.setValue(float(self.params['REFERENCE_VS30_VALUE']))
        depth25 = java.jclass("DoubleParameter")("Depth 2.5 km/sec")
        depth25.setValue(float(
            self.params['REFERENCE_DEPTH_TO_2PT5KM_PER_SEC_PARAM']))
        sadigh = java.jclass("StringParameter")("Sadigh Site Type")
        sadigh.setValue(self.params['SADIGH_SITE_TYPE'])

        site.addParameter(vs30)
        site.addParameter(depth25)
        site.addParameter(sadigh)
        jsite_list.add(site)
    return jsite_list

def generate_gmpe_map(job_id, cache):
    """
    Generate the GMPE map from the GMPE data stored in the KVS.

    :param int job_id: id of the job
    :param cache: jpype instance of `org.gem.engine.hazard.redis.Cache`
    :returns: jpype instance of
        `HashMap<TectonicRegionType, ScalarIntensityMeasureRelationshipAPI>`
    """
    gmpe_key = kvs.tokens.gmpe_key(job_id)

    gmpe_map = java.jclass(
        "JsonSerializer").getGmpeMapFromCache(cache, gmpe_key)

    return gmpe_map

def test__prepare_gmf_serialization_with_imt_other_than_mmi(self):
    # In case of imt != mmi the GMF values are transformed as needed.
    location1 = java.jclass("Location")(1.0, 2.0)
    location2 = java.jclass("Location")(1.1, 2.1)

    site1 = java.jclass("Site")(location1)
    site2 = java.jclass("Site")(location2)

    hashmap = java.jclass("HashMap")()
    hashmap.put(site1, 0.1)
    hashmap.put(site2, 0.2)

    expected = {
        shapes.Site(2.0, 1.0): {"groundMotion": math.exp(0.1)},
        shapes.Site(2.1, 1.1): {"groundMotion": math.exp(0.2)},
    }
    actual = scenario._prepare_gmf_serialization(hashmap, "PGA")
    self.assertEqual(expected, actual)

def rupture_model(self):
    """Load the rupture model specified in the configuration file. The key
    used in the configuration file is SINGLE_RUPTURE_MODEL.

    :returns: jpype wrapper around an instance of
        org.opensha.sha.earthquake.EqkRupture.
    """
    rel_path = self.params["SINGLE_RUPTURE_MODEL"]
    abs_path = os.path.join(self.params["BASE_PATH"], rel_path)
    grid_spacing = float(self.params["RUPTURE_SURFACE_DISCRETIZATION"])

    return java.jclass("RuptureReader")(abs_path, grid_spacing).read()

def test__prepare_gmf_serialization_with_mmi(self):
    # In case of imt == mmi the GMF values are left unchanged
    location1 = java.jclass("Location")(1.0, 2.0)
    location2 = java.jclass("Location")(1.1, 2.1)

    site1 = java.jclass("Site")(location1)
    site2 = java.jclass("Site")(location2)

    hashmap = java.jclass("HashMap")()
    hashmap.put(site1, 0.1)
    hashmap.put(site2, 0.2)

    expected = {
        shapes.Site(2.0, 1.0): {"groundMotion": 0.1},
        shapes.Site(2.1, 1.1): {"groundMotion": 0.2},
    }
    actual = scenario._prepare_gmf_serialization(hashmap, "MMI")
    self.assertEqual(expected, actual)

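# A pure-Python sketch of the contract the tests above fix for
# scenario._prepare_gmf_serialization(): the Java HashMap<Site, Double> is
# flattened into {site: {"groundMotion": value}}, exponentiating the value
# for every IMT except "MMI". Plain (lon, lat) tuples stand in for the
# shapes.Site objects; this is an illustration, not the real implementation.

import math

def prepare_gmf_serialization_sketch(gmf, imt):
    transform = (lambda x: x) if imt == "MMI" else math.exp
    return dict(((lon, lat), {"groundMotion": transform(value)})
                for (lon, lat), value in gmf.items())

assert prepare_gmf_serialization_sketch({(2.0, 1.0): 0.1}, "MMI") == \
    {(2.0, 1.0): {"groundMotion": 0.1}}
assert prepare_gmf_serialization_sketch({(2.0, 1.0): 0.1}, "PGA") == \
    {(2.0, 1.0): {"groundMotion": math.exp(0.1)}}
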
def set_gmpe_params(gmpe_map, params):
    """Push parameters from the config file into the GMPE objects.

    :param gmpe_map: jpype instance of
        `HashMap<TectonicRegionType, ScalarIntensityMeasureRelationshipAPI>`
    :param dict params: job config params
    """
    jpype = java.jvm()

    jd_float = lambda x: jpype.JDouble(float(x))

    component = params.get('COMPONENT')
    imt = params.get('INTENSITY_MEASURE_TYPE')
    # PERIOD is not used in UHS calculations.
    period = (jd_float(params.get('PERIOD'))
              if params.get('PERIOD') is not None else None)
    damping = jd_float(params.get('DAMPING'))
    gmpe_trunc_type = params.get('GMPE_TRUNCATION_TYPE')
    trunc_level = jd_float(params.get('TRUNCATION_LEVEL'))
    stddev_type = params.get('STANDARD_DEVIATION_TYPE')

    j_set_gmpe_params = java.jclass("GmpeLogicTreeData").setGmpeParams

    for tect_region in gmpe_map.keySet():
        gmpe = gmpe_map.get(tect_region)

        # There are two overloads for this method; one with 'period'...
        if period is not None:
            j_set_gmpe_params(
                component, imt, period, damping,
                gmpe_trunc_type, trunc_level, stddev_type,
                jpype.JObject(gmpe, java.jclass("AttenuationRelationship")))
        # ... and one without.
        else:
            j_set_gmpe_params(
                component, imt, damping,
                gmpe_trunc_type, trunc_level, stddev_type,
                jpype.JObject(gmpe, java.jclass("AttenuationRelationship")))
        gmpe_map.put(tect_region, gmpe)

def compute_hazard_curve(self, sites, realization):
    """ Compute hazard curves, write them to KVS as JSON,
    and return a list of the KVS keys for each curve. """
    jpype = java.jvm()
    try:
        calc = java.jclass("HazardCalculator")
        poes_list = calc.getHazardCurvesAsJson(
            self.parameterize_sites(sites),
            self.generate_erf(),
            self.generate_gmpe_map(),
            general.get_iml_list(
                self.job_ctxt.imls,
                self.job_ctxt.params['INTENSITY_MEASURE_TYPE']),
            self.job_ctxt['MAXIMUM_DISTANCE'])
    except jpype.JavaException, ex:
        unwrap_validation_error(jpype, ex)

def build_simple_fault_insert(fault):
    """
    Build up the simple fault dict. See the documentation for
    :py:function:`parse_simple_fault_src` for more information.
    """
    simple_fault = db.SIMPLE_FAULT.copy()
    simple_fault['name'] = fault.getName()
    simple_fault['gid'] = fault.getID()
    simple_fault['dip'] = fault.getDip()
    simple_fault['upper_depth'] = fault.getSeismDepthUpp()
    simple_fault['lower_depth'] = fault.getSeismDepthLow()

    trace = fault.getTrace()

    # coords are ordered as lon/lat/depth
    point_str_3d = lambda pt: \
        ' '.join([str(pt.getLongitude()),
                  str(pt.getLatitude()),
                  str(pt.getDepth())])

    coord_list = lambda point_list: \
        ', '.join([point_str_3d(point) for point in point_list])

    trace_coords = coord_list(trace)

    simple_fault['edge'] = geoalchemy.WKTSpatialElement(
        'SRID=4326;LINESTRING(%s)' % trace_coords)

    surface = get_fault_surface(fault)
    location_list = surface.getSurfacePerimeterLocsList()

    formatter = java.jclass("LocationListFormatter")(location_list)

    outline_coords = formatter.format()

    simple_fault['outline'] = geoalchemy.WKTSpatialElement(
        'SRID=4326;POLYGON((%s))' % outline_coords)

    simple_fault_insert = {
        'table': '%s.simple_fault' % db.PSHAI_TS,
        'data': simple_fault}

    return simple_fault_insert