def setUp(self):
    """
    Build the fixture: two truncated Gutenberg-Richter point sources,
    three sites and the hazard-calculation inputs (GSIM map, IMTs, IMLs).
    """
    mfd_1 = TruncatedGRMFD(4.5, 8.0, 0.1, 4.0, 1.0)
    mfd_2 = TruncatedGRMFD(4.5, 7.5, 0.1, 3.5, 1.1)
    # The two sources differ only in id, name and MFD, so build them
    # from a small table instead of repeating the constructor call.
    self.source_model = [
        PointSource(src_id, src_name, 'Active Shallow Crust', src_mfd,
                    1.0, WC1994(), 1.0, PoissonTOM(50.0), 0.0, 30.0,
                    Point(30.0, 30.5),
                    PMF([(1.0, NodalPlane(0.0, 90.0, 0.0))]),
                    PMF([(1.0, 10.0)]))
        for src_id, src_name, src_mfd in (('001', 'Point1', mfd_1),
                                          ('002', 'Point2', mfd_2))]
    # Three sites, all with vs30 = 760 m/s
    self.sites = SiteCollection([
        Site(Point(lon, lat), 760., True, 1.0, 1.0, sid)
        for lon, lat, sid in ((30.0, 30.0, 1),
                              (30.25, 30.25, 2),
                              (30.4, 30.4, 2))])
    self.gsims = {'Active Shallow Crust': 'AkkarBommer2010'}
    self.imts = ['PGA', 'SA(0.5)']
    self.imls = [[0.01, 0.1, 0.2, 0.5, 0.8]]
def example_calc(apply):
    """
    Compute hazard curves for a two-point-source model on three sites,
    distributing the work through the given ``apply`` function.
    """
    # Three sites with vs30 = 760 m/s
    sitecol = SiteCollection([
        Site(Point(30.0, 30.0), 760., 1.0, 1.0),
        Site(Point(30.25, 30.25), 760., 1.0, 1.0),
        Site(Point(30.4, 30.4), 760., 1.0, 1.0)])
    mfd_1 = TruncatedGRMFD(4.5, 8.0, 0.1, 4.0, 1.0)
    mfd_2 = TruncatedGRMFD(4.5, 7.5, 0.1, 3.5, 1.1)
    # Two point sources at the same location, differing only in their
    # magnitude-frequency distributions
    sources = [
        PointSource('001', 'Point1', 'Active Shallow Crust',
                    mfd_1, 1.0, WC1994(), 1.0, PoissonTOM(50.0), 0.0, 30.0,
                    Point(30.0, 30.5),
                    PMF([(1.0, NodalPlane(0.0, 90.0, 0.0))]),
                    PMF([(1.0, 10.0)])),
        PointSource('002', 'Point2', 'Active Shallow Crust',
                    mfd_2, 1.0, WC1994(), 1.0, PoissonTOM(50.0), 0.0, 30.0,
                    Point(30.0, 30.5),
                    PMF([(1.0, NodalPlane(0.0, 90.0, 0.0))]),
                    PMF([(1.0, 10.0)]))
    ]
    # Same 5 intensity levels for both IMTs
    imtls = {
        'PGA': [0.01, 0.1, 0.2, 0.5, 0.8],
        'SA(0.5)': [0.01, 0.1, 0.2, 0.5, 0.8]
    }
    gsims = {'Active Shallow Crust': AkkarBommer2010()}
    return calc_hazard_curves(sources, sitecol, imtls, gsims, apply=apply,
                              filter_distance='rrup')
def reference_psha_calculation_openquake(): """ Sets up the reference PSHA calculation calling OpenQuake directly. All subsequent implementations should match this example """ # Site model - 3 Sites site_model = SiteCollection([ Site(Point(30.0, 30.0), 760., True, 1.0, 1.0, 1), Site(Point(30.25, 30.25), 760., True, 1.0, 1.0, 2), Site(Point(30.4, 30.4), 760., True, 1.0, 1.0, 2) ]) # Source Model Two Point Sources mfd_1 = TruncatedGRMFD(4.5, 8.0, 0.1, 4.0, 1.0) mfd_2 = TruncatedGRMFD(4.5, 7.5, 0.1, 3.5, 1.1) source_model = [ PointSource('001', 'Point1', 'Active Shallow Crust', mfd_1, 1.0, WC1994(), 1.0, PoissonTOM(50.0), 0.0, 30.0, Point(30.0, 30.5), PMF([(1.0, NodalPlane(0.0, 90.0, 0.0))]), PMF([(1.0, 10.0)])), PointSource('002', 'Point2', 'Active Shallow Crust', mfd_2, 1.0, WC1994(), 1.0, PoissonTOM(50.0), 0.0, 30.0, Point(30.0, 30.5), PMF([(1.0, NodalPlane(0.0, 90.0, 0.0))]), PMF([(1.0, 10.0)])) ] imts = { 'PGA': [0.01, 0.1, 0.2, 0.5, 0.8], 'SA(0.5)': [0.01, 0.1, 0.2, 0.5, 0.8] } # Akkar & Bommer (2010) GMPE gsims = {'Active Shallow Crust': gsim.akkar_bommer_2010.AkkarBommer2010()} truncation_level = None return calc_hazard_curves(source_model, site_model, imts, gsims, truncation_level)
def _get_rupture(self, min_mag, max_mag, hypocenter_depth,
                 aspect_ratio, dip, rupture_mesh_spacing,
                 upper_seismogenic_depth=2,
                 lower_seismogenic_depth=16):
    """
    Build a point source from the given parameters, assert that it
    generates exactly one rupture with the expected TOM, TRT, rake and a
    planar surface, and return that rupture.
    """
    source_id = name = 'test-source'
    trt = TRT.ACTIVE_SHALLOW_CRUST
    mfd = TruncatedGRMFD(a_val=2, b_val=1, min_mag=min_mag,
                         max_mag=max_mag, bin_width=1)
    location = Point(0, 0)
    nodal_plane = NodalPlane(strike=45, dip=dip, rake=-123.23)
    # Single nodal plane and single hypocentral depth, each with
    # probability 1
    nodal_plane_distribution = PMF([(1, nodal_plane)])
    hypocenter_distribution = PMF([(1, hypocenter_depth)])
    magnitude_scaling_relationship = PeerMSR()
    rupture_aspect_ratio = aspect_ratio
    tom = PoissonTOM(time_span=50)
    point_source = PointSource(
        source_id, name, trt, mfd, rupture_mesh_spacing,
        magnitude_scaling_relationship,
        rupture_aspect_ratio, tom,
        upper_seismogenic_depth, lower_seismogenic_depth,
        location, nodal_plane_distribution, hypocenter_distribution
    )
    ruptures = list(point_source.iter_ruptures())
    # exactly one rupture is expected (asserted, then unpacked)
    self.assertEqual(len(ruptures), 1)
    [rupture] = ruptures
    # the rupture must inherit the source's TOM, TRT and rake
    self.assertIs(rupture.temporal_occurrence_model, tom)
    self.assertIs(rupture.tectonic_region_type, trt)
    self.assertEqual(rupture.rake, nodal_plane.rake)
    self.assertIsInstance(rupture.surface, PlanarSurface)
    return rupture
def get_background_sources(self, src_filter):
    """
    Turn the background model of a given branch into a set of point sources

    :param src_filter:
        SourceFilter instance
    """
    background_sids = self.get_background_sids(src_filter)
    with h5py.File(self.source_file, "r") as hdf5:
        grid_loc = "/".join(["Grid", self.idx_set["grid_key"]])
        # FIX: `.value` was removed in h5py 3.0; `ds[()]` is the supported
        # way to read a whole dataset (already used by sibling methods)
        mags = hdf5[grid_loc + "/Magnitude"][()]
        mmax = hdf5[grid_loc + "/MMax"][background_sids]
        rates = hdf5[grid_loc + "/RateArray"][background_sids, :]
        locations = hdf5["Grid/Locations"][background_sids, :]
        sources = []
        for i, bg_idx in enumerate(background_sids):
            src_id = "_".join([self.idx_set["grid_key"], str(bg_idx)])
            src_name = "|".join([self.idx_set["total_key"], str(bg_idx)])
            # Get MFD: keep only magnitude bins in [min_mag, mmax) for
            # this grid cell
            mag_idx = numpy.logical_and(mags >= self.min_mag,
                                        mags < mmax[i])
            src_mags = mags[mag_idx]
            src_rates = rates[i, :]
            src_mfd = EvenlyDiscretizedMFD(src_mags[0],
                                           src_mags[1] - src_mags[0],
                                           src_rates[mag_idx].tolist())
            ps = PointSource(src_id, src_name, self.tectonic_region_type,
                             src_mfd, self.mesh_spacing, self.msr,
                             self.aspect, self.tom, self.usd, self.lsd,
                             Point(locations[i, 0], locations[i, 1]),
                             self.npd, self.hdd)
            ps.src_group_id = self.src_group_id
            sources.append(ps)
    return sources
def make_point_source(lon=1.2, lat=3.4, **kwargs):
    """
    Build a PointSource at the given coordinates from a set of default
    constructor arguments, overridable through ``kwargs``; assert it is
    pickleable and return it.
    """
    params = dict(
        source_id='source_id',
        name='source name',
        tectonic_region_type=TRT.SUBDUCTION_INTRASLAB,
        mfd=TruncatedGRMFD(a_val=1, b_val=2, min_mag=3, max_mag=5,
                           bin_width=1),
        location=Point(lon, lat, 5.6),
        nodal_plane_distribution=PMF([(1, NodalPlane(1, 2, 3))]),
        hypocenter_distribution=PMF([(1, 4)]),
        upper_seismogenic_depth=1.3,
        lower_seismogenic_depth=4.9,
        magnitude_scaling_relationship=PeerMSR(),
        rupture_aspect_ratio=1.333,
        rupture_mesh_spacing=1.234,
        temporal_occurrence_model=PoissonTOM(50.))
    # caller-supplied overrides win over the defaults
    params.update(kwargs)
    source = PointSource(**params)
    assert_pickleable(source)
    return source
def get_background_sources(self):
    """
    Turn the background model of a given branch into a set of point sources
    """
    background_sids = self.get_background_sids()
    with h5py.File(self.source_file, "r") as hdf5:
        grid_loc = "/".join(["Grid", self.ukey["grid_key"]])
        # for instance Grid/FM0_0_MEANFS_MEANMSR_MeanRates
        mags = hdf5[grid_loc + "/Magnitude"][()]
        mmax = hdf5[grid_loc + "/MMax"][background_sids]
        rates = hdf5[grid_loc + "/RateArray"][background_sids, :]
        locations = hdf5["Grid/Locations"][background_sids, :]
        sources = []
        for i, bg_idx in enumerate(background_sids):
            src_id = "_".join([self.ukey["grid_key"], str(bg_idx)])
            src_name = "|".join([self.ukey["total_key"], str(bg_idx)])
            # keep only magnitude bins in [min_mag, mmax) for this cell
            mag_idx = (self.min_mag <= mags) & (mags < mmax[i])
            src_mags = mags[mag_idx]
            src_mfd = EvenlyDiscretizedMFD(src_mags[0],
                                           src_mags[1] - src_mags[0],
                                           rates[i, mag_idx].tolist())
            ps = PointSource(src_id, src_name, self.tectonic_region_type,
                             src_mfd, self.mesh_spacing, self.msr,
                             self.aspect, self.tom, self.usd, self.lsd,
                             Point(locations[i, 0], locations[i, 1]),
                             self.npd, self.hdd)
            # checksum over the source's pickled state; presumably used
            # to detect changed sources elsewhere -- confirm
            ps.checksum = zlib.adler32(pickle.dumps(vars(ps), protocol=4))
            ps._wkt = ps.wkt()
            ps.id = self.id
            ps.et_id = self.et_id
            ps.num_ruptures = ps.count_ruptures()
            ps.nsites = 1  # anything <> 0 goes
            sources.append(ps)
    return sources
def __iter__(self):
    """
    Split an area source into a generator of point sources.

    MFDs will be rescaled appropriately for the number of points in the
    area mesh.
    """
    mesh = self.polygon.discretize(self.area_discretization)
    num_points = len(mesh)
    area_mfd = self.mfd
    if isinstance(area_mfd, mfd.TruncatedGRMFD):
        # dividing the total rate by N means subtracting log10(N) from
        # the a-value
        new_mfd = mfd.TruncatedGRMFD(
            a_val=area_mfd.a_val - math.log10(num_points),
            b_val=area_mfd.b_val,
            bin_width=area_mfd.bin_width,
            min_mag=area_mfd.min_mag,
            max_mag=area_mfd.max_mag)
    elif isinstance(area_mfd, mfd.EvenlyDiscretizedMFD):
        # per-bin rates are divided evenly among the mesh points
        new_occur_rates = [
            x / num_points for x in area_mfd.occurrence_rates
        ]
        new_mfd = mfd.EvenlyDiscretizedMFD(
            min_mag=area_mfd.min_mag,
            bin_width=area_mfd.bin_width,
            occurrence_rates=new_occur_rates)
    elif isinstance(area_mfd, mfd.ArbitraryMFD):
        new_occur_rates = [
            x / num_points for x in area_mfd.occurrence_rates
        ]
        new_mfd = mfd.ArbitraryMFD(magnitudes=area_mfd.magnitudes,
                                   occurrence_rates=new_occur_rates)
    elif isinstance(area_mfd, mfd.YoungsCoppersmith1985MFD):
        # rebuilt from a characteristic rate scaled down by N
        new_mfd = mfd.YoungsCoppersmith1985MFD.from_characteristic_rate(
            area_mfd.min_mag, area_mfd.b_val, area_mfd.char_mag,
            area_mfd.char_rate / num_points, area_mfd.bin_width)
    else:
        raise TypeError('Unknown MFD: %s' % area_mfd)
    # one point source per mesh node, all sharing the rescaled MFD
    for i, (lon, lat) in enumerate(zip(mesh.lons, mesh.lats)):
        pt = PointSource(
            # Generate a new ID and name
            source_id='%s:%s' % (self.source_id, i),
            name=self.name,
            tectonic_region_type=self.tectonic_region_type,
            mfd=new_mfd,
            rupture_mesh_spacing=self.rupture_mesh_spacing,
            magnitude_scaling_relationship=self.
            magnitude_scaling_relationship,
            rupture_aspect_ratio=self.rupture_aspect_ratio,
            upper_seismogenic_depth=self.upper_seismogenic_depth,
            lower_seismogenic_depth=self.lower_seismogenic_depth,
            location=geo.Point(lon, lat),
            nodal_plane_distribution=self.nodal_plane_distribution,
            hypocenter_distribution=self.hypocenter_distribution,
            temporal_occurrence_model=self.temporal_occurrence_model)
        pt.num_ruptures = pt.count_ruptures()
        yield pt
def __iter__(self):
    """
    Yield one PointSource per (MFD, mesh point) pair; the point index is
    appended to the parent source id to build each child's id and name.
    """
    for seq, (point_mfd, location) in enumerate(zip(self.mfd, self.mesh)):
        child_id = '%s:%s' % (self.source_id, seq)
        yield PointSource(
            child_id, child_id, self.tectonic_region_type, point_mfd,
            self.rupture_mesh_spacing,
            self.magnitude_scaling_relationship,
            self.rupture_aspect_ratio, self.temporal_occurrence_model,
            self.upper_seismogenic_depth, self.lower_seismogenic_depth,
            location, self.nodal_plane_distribution,
            self.hypocenter_distribution)
def test_rupture_close_to_south_pole(self):
    # data taken from real example and causing "surface's angles are not
    # right" error
    mfd = EvenlyDiscretizedMFD(
        min_mag=5., bin_width=0.1, occurrence_rates=[2.180e-07]
    )
    nodal_plane_dist = PMF([(1., NodalPlane(135., 20., 90.))])
    # point source near the south pole (~83.6 deg S); a single magnitude
    # bin, nodal plane and hypocentral depth, so exactly one rupture
    src = PointSource(source_id='1', name='pnt', tectonic_region_type='asc',
                      mfd=mfd, rupture_mesh_spacing=1,
                      magnitude_scaling_relationship=WC1994(),
                      rupture_aspect_ratio=1.,
                      temporal_occurrence_model=PoissonTOM(50.),
                      upper_seismogenic_depth=0,
                      lower_seismogenic_depth=26,
                      location=Point(-165.125, -83.600),
                      nodal_plane_distribution=nodal_plane_dist,
                      hypocenter_distribution=PMF([(1., 9.)]))
    # iter_ruptures() must not raise despite the extreme latitude
    ruptures = list(src.iter_ruptures())
    self.assertEqual(len(ruptures), 1)
def create_oqhazardlib_source(self, tom, mesh_spacing, use_defaults=False):
    """
    Converts the point source model into an instance of the :class:
    openquake.hazardlib.source.point_source.PointSource

    :param bool use_defaults:
        If set to true, will use put in default values for magitude
        scaling relation, rupture aspect ratio, nodal plane distribution
        or hypocentral depth distribution where missing. If set to False
        then value errors will be raised when information is missing.
    """
    # convert each model component up front, in the same order the
    # PointSource constructor consumes them
    hazardlib_mfd = conv.mfd_to_hazardlib(self.mfd)
    scale_rel = conv.mag_scale_rel_to_hazardlib(self.mag_scale_rel,
                                                use_defaults)
    aspect = conv.render_aspect_ratio(self.rupt_aspect_ratio, use_defaults)
    npd = conv.npd_to_pmf(self.nodal_plane_dist, use_defaults)
    hdd = conv.hdd_to_pmf(self.hypo_depth_dist, use_defaults)
    return PointSource(self.id, self.name, self.trt, hazardlib_mfd,
                       mesh_spacing, scale_rel, aspect, tom,
                       self.upper_depth, self.lower_depth, self.geometry,
                       npd, hdd)
def get_background_sources(self, sample_factor=None):
    """
    Turn the background model of a given branch into a set of point sources

    :param sample_factor:
        Used to reduce the sources if OQ_SAMPLE_SOURCES is set
    """
    background_sids = self.get_background_sids()
    if sample_factor is not None:  # hack for use in the mosaic
        background_sids = random_filter(
            background_sids, sample_factor, seed=42)
    with h5py.File(self.source_file, "r") as hdf5:
        grid_loc = "/".join(["Grid", self.idx_set["grid_key"]])
        # for instance Grid/FM0_0_MEANFS_MEANMSR_MeanRates
        mags = hdf5[grid_loc + "/Magnitude"][()]
        mmax = hdf5[grid_loc + "/MMax"][background_sids]
        rates = hdf5[grid_loc + "/RateArray"][background_sids, :]
        locations = hdf5["Grid/Locations"][background_sids, :]
        sources = []
        for i, bg_idx in enumerate(background_sids):
            src_id = "_".join([self.idx_set["grid_key"], str(bg_idx)])
            src_name = "|".join([self.idx_set["total_key"], str(bg_idx)])
            # keep only magnitude bins in [min_mag, mmax) for this cell
            mag_idx = (self.min_mag <= mags) & (mags < mmax[i])
            src_mags = mags[mag_idx]
            src_mfd = EvenlyDiscretizedMFD(
                src_mags[0], src_mags[1] - src_mags[0],
                rates[i, mag_idx].tolist())
            ps = PointSource(
                src_id, src_name, self.tectonic_region_type, src_mfd,
                self.mesh_spacing, self.msr, self.aspect, self.tom,
                self.usd, self.lsd,
                Point(locations[i, 0], locations[i, 1]),
                self.npd, self.hdd)
            # checksum over the source's pickled state; presumably used
            # to detect changed sources elsewhere -- confirm
            ps.checksum = zlib.adler32(pickle.dumps(vars(ps), protocol=4))
            ps._wkt = ps.wkt()
            ps.id = self.id
            ps.grp_id = self.grp_id
            ps.num_ruptures = ps.count_ruptures()
            sources.append(ps)
    return sources
def test(self):
    """
    Hazard curves from AvgGMPE should match (to 3 decimals) the weighted
    combination of the per-GSIM hazard curves.
    """
    sitecol = SiteCollection([Site(Point(30.0, 30.0), 760., 1.0, 1.0)])
    gr_mfd = TruncatedGRMFD(4.5, 8.0, 0.1, 4.0, 1.0)
    sources = [PointSource(
        '001', 'Point1', 'Active Shallow Crust', gr_mfd, 1.0, WC1994(),
        1.0, PoissonTOM(50.0), 0.0, 30.0, Point(30.0, 30.5),
        PMF([(1.0, NodalPlane(0.0, 90.0, 0.0))]), PMF([(1.0, 10.0)]))]
    imtls = {'PGA': [0.01, 0.1, 0.2, 0.5, 0.8]}
    # curves for each GSIM separately, then their weighted combination
    curves = {}
    for gmpe in (AkkarBommer2010(), SadighEtAl1997()):
        curves[type(gmpe).__name__] = calc_hazard_curves(
            sources, sitecol, imtls,
            {'Active Shallow Crust': gmpe})['PGA']
    combined = (.6 * curves['AkkarBommer2010'] +
                .4 * curves['SadighEtAl1997'])
    avg_gmpe = AvgGMPE(b1=dict(AkkarBommer2010={'weight': .6}),
                       b2=dict(SadighEtAl1997={'weight': .4}))
    avg_curves = calc_hazard_curves(
        sources, sitecol, imtls,
        {'Active Shallow Crust': avg_gmpe})['PGA']
    # the AvgGMPE is not producing real means!!
    numpy.testing.assert_almost_equal(combined, avg_curves, decimal=3)
from openquake.hazardlib.geo.line import Line from openquake.hazardlib.scalerel.point import PointMSR from openquake.hazardlib.scalerel.peer import PeerMSR from openquake.hazardlib.tom import PoissonTOM from openquake.hazardlib.geo.surface.simple_fault import SimpleFaultSurface from openquake.hmtk.comparison.rate_grids import RateGrid, RatePolygon SOURCE_MODEL_FILE = os.path.join(os.path.dirname(__file__), "rate_grid_test_model.xml") POINT_SOURCE = PointSource("PNT000", "Point 000", "Active Shallow Crust", EvenlyDiscretizedMFD(5.0, 0.1, [1.0]), 1.0, PointMSR(), 1.0, PoissonTOM(1.0), 0.0, 20.0, Point(15.05, 15.05), PMF([(1.0, NodalPlane(0.0, 90.0, 0.0))]), PMF([(1.0, 5.0)])) BORDER_POINT_SOURCE = PointSource("PNT000", "Point 000", "Active Shallow Crust", EvenlyDiscretizedMFD(5.0, 0.1, [1.0]), 1.0, PointMSR(), 1.0, PoissonTOM(1.0), 0.0, 20.0,
def test_7_many_ruptures(self):
    # 2 magnitudes x 2 nodal planes x 2 hypocentral depths: each rupture's
    # rate is the MFD rate scaled by the two PMF weights
    source_id = name = 'test7-source'
    trt = TRT.VOLCANIC
    mag1 = 4.5
    mag2 = 5.5
    mag1_rate = 9e-3
    mag2_rate = 9e-4
    hypocenter1 = 9.0
    hypocenter2 = 10.0
    hypocenter1_weight = Decimal('0.8')
    hypocenter2_weight = Decimal('0.2')
    nodalplane1 = NodalPlane(strike=45, dip=90, rake=0)
    nodalplane2 = NodalPlane(strike=0, dip=45, rake=10)
    nodalplane1_weight = Decimal('0.3')
    nodalplane2_weight = Decimal('0.7')
    upper_seismogenic_depth = 2
    lower_seismogenic_depth = 16
    rupture_aspect_ratio = 2
    rupture_mesh_spacing = 0.5
    location = Point(0, 0)
    magnitude_scaling_relationship = PeerMSR()
    tom = PoissonTOM(time_span=50)
    mfd = EvenlyDiscretizedMFD(min_mag=mag1, bin_width=(mag2 - mag1),
                               occurrence_rates=[mag1_rate, mag2_rate])
    nodal_plane_distribution = PMF([(nodalplane1_weight, nodalplane1),
                                    (nodalplane2_weight, nodalplane2)])
    hypocenter_distribution = PMF([(hypocenter1_weight, hypocenter1),
                                   (hypocenter2_weight, hypocenter2)])
    point_source = PointSource(
        source_id, name, trt, mfd, rupture_mesh_spacing,
        magnitude_scaling_relationship, rupture_aspect_ratio, tom,
        upper_seismogenic_depth, lower_seismogenic_depth,
        location, nodal_plane_distribution, hypocenter_distribution)
    actual_ruptures = list(point_source.iter_ruptures())
    # count_ruptures() must agree with what iter_ruptures() yields
    self.assertEqual(len(actual_ruptures), point_source.count_ruptures())
    # expected (rate, corners) keyed by (magnitude, rake, hypo depth)
    expected_ruptures = {
        (mag1, nodalplane1.rake, hypocenter1): (
            # probabilistic rupture's occurrence rate
            9e-3 * 0.3 * 0.8,
            # rupture surface corners
            planar_surface_test_data.TEST_7_RUPTURE_1_CORNERS),
        (mag2, nodalplane1.rake, hypocenter1): (
            9e-4 * 0.3 * 0.8,
            planar_surface_test_data.TEST_7_RUPTURE_2_CORNERS),
        (mag1, nodalplane2.rake, hypocenter1): (
            9e-3 * 0.7 * 0.8,
            planar_surface_test_data.TEST_7_RUPTURE_3_CORNERS),
        (mag2, nodalplane2.rake, hypocenter1): (
            9e-4 * 0.7 * 0.8,
            planar_surface_test_data.TEST_7_RUPTURE_4_CORNERS),
        (mag1, nodalplane1.rake, hypocenter2): (
            9e-3 * 0.3 * 0.2,
            planar_surface_test_data.TEST_7_RUPTURE_5_CORNERS),
        (mag2, nodalplane1.rake, hypocenter2): (
            9e-4 * 0.3 * 0.2,
            planar_surface_test_data.TEST_7_RUPTURE_6_CORNERS),
        (mag1, nodalplane2.rake, hypocenter2): (
            9e-3 * 0.7 * 0.2,
            planar_surface_test_data.TEST_7_RUPTURE_7_CORNERS),
        (mag2, nodalplane2.rake, hypocenter2): (
            9e-4 * 0.7 * 0.2,
            planar_surface_test_data.TEST_7_RUPTURE_8_CORNERS)
    }
    for actual_rupture in actual_ruptures:
        expected_occurrence_rate, expected_corners = expected_ruptures[(
            actual_rupture.mag, actual_rupture.rake,
            actual_rupture.hypocenter.depth)]
        self.assertTrue(
            isinstance(actual_rupture, ParametricProbabilisticRupture))
        self.assertEqual(actual_rupture.occurrence_rate,
                         expected_occurrence_rate)
        self.assertIs(actual_rupture.temporal_occurrence_model, tom)
        self.assertEqual(actual_rupture.tectonic_region_type, trt)
        surface = actual_rupture.surface
        # expected corners are stored (tl, tr, br, bl)
        tl, tr, br, bl = expected_corners
        self.assertEqual(tl, surface.top_left)
        self.assertEqual(tr, surface.top_right)
        self.assertEqual(bl, surface.bottom_left)
        self.assertEqual(br, surface.bottom_right)
def run_smoothing(grid_lims, config, catalogue, completeness_table,
                  map_config, run):
    """Run all the smoothing

    :params config:
        Dictionary of configuration parameters.
        For more info see helmstetter_werner_2012 code and docs.
    """
    completeness_string = 'comp'
    for ym in completeness_table:
        completeness_string += '_%i_%.1f' % (ym[0], ym[1])
    smoother_filename = "Australia_Adaptive_K%i_b%.3f_mmin%.1f_%s.csv" % (
        config['k'], config['bvalue'], config['mmin'], completeness_string)
    filename = smoother_filename[:-4] + '.xml'
    # NOTE(review): `overwrite` is a free (module-level) name here, unlike
    # the sibling run_smoothing that takes it as a parameter -- confirm
    if os.path.exists(filename) and not overwrite:
        # FIX: Python 2 print statements converted to print() calls
        print('%s already created, not overwriting!' % filename)
        return
    smoother = h_w.HelmstetterEtAl2007(
        grid_lims, config, catalogue,
        storage_file=("Aus1_tmp2%.3f_%s.hdf5" % (config['bvalue'], run)))
    smoother._get_catalogue_completeness_weights(completeness_table)
    smoother.build_distance_arrays()
    smoother.build_catalogue_2_grid_array()
    # Exhaustive smoothing (disabled by default); when enabled, the
    # optimised k and r_min replace the configured values
    exhaustive = False
    if exhaustive:
        params, poiss_llh = smoother.exhaustive_smoothing(
            np.arange(2, 10, 1), np.arange(1.0e-6, 1.0e-5, 2.0e-6))
        print(params, poiss_llh)
        smoother.config["k"] = params[0]
        smoother.config["r_min"] = params[1]
    d_i = smoother.optimise_bandwidths()
    smoother.run_smoothing(config["r_min"], d_i)
    data = np.column_stack([smoother.grid, smoother.rates])
    np.savetxt(smoother_filename, data,
               delimiter=",",
               fmt=["%.4f", "%.4f", "%.8e"],
               header="longitude,latitude,rate")
    # Creating a basemap - input a configuration and (if desired) a title
    title = ('Smoothed seismicity rate for learning \n'
             'period %i %i, K=%i, Mmin=%.1f') % (
        config['learning_start'], config['learning_end'],
        smoother.config['k'], smoother.config['mmin'])
    basemap1 = HMTKBaseMap(map_config, title)
    # NOTE(review): meridians are drawn from the latitude limits and
    # parallels from the longitude limits -- looks swapped; confirm
    basemap1.m.drawmeridians(
        np.arange(map_config['min_lat'], map_config['max_lat'], 5))
    basemap1.m.drawparallels(
        np.arange(map_config['min_lon'], map_config['max_lon'], 5))
    # Adding the smoothed grid to the basemap
    sym = (2., 3., 'cx')
    x, y = basemap1.m(smoother.grid[:, 0], smoother.grid[:, 1])
    # colour scale upper bound depends on the minimum magnitude
    if smoother.config['mmin'] == 3.5:
        vmax = -1.0
    elif smoother.config['mmin'] == 4.0:
        vmax = -2.5
    else:
        vmax = -1.0
    basemap1.m.scatter(x, y, marker='s', c=np.log10(smoother.rates),
                       cmap=plt.cm.coolwarm, zorder=10, lw=0,
                       vmin=-7.0, vmax=vmax)
    basemap1.m.drawcoastlines(linewidth=1, zorder=50)  # coastline on top
    plt.colorbar(label='Log10(Smoothed rate per cell)')
    plt.legend()
    figname = smoother_filename[:-4] + '_smoothed_rates_map.png'
    plt.savefig(figname)
    # Build one point source per smoothed grid cell
    source_list = []
    min_mag = 4.5
    max_mag = 7.2
    # Dummy temporal occurence model for building pt sources
    tom = PoissonTOM(50)
    msr = Leonard2014_SCR()
    for j in range(len(data[:, 2])):
        identifier = 'ASS' + str(j) + '_' + str(run)
        name = 'Helmstetter' + str(j) + '_' + str(run)
        point = Point(data[j, 0], data[j, 1], 10)
        rate = data[j, 2]
        # Convert rate to an a-value
        aval = np.log10(rate) + config['bvalue'] * config["mmin"]
        mfd = TruncatedGRMFD(min_mag, max_mag, 0.1, aval, config['bvalue'])
        hypo_depth_dist = PMF([(0.5, 10.0), (0.25, 5.0), (0.25, 15.0)])
        nodal_plane_dist = PMF([(0.3, NodalPlane(0, 30, 90)),
                                (0.2, NodalPlane(90, 30, 90)),
                                (0.3, NodalPlane(180, 30, 90)),
                                (0.2, NodalPlane(270, 30, 90))])
        point_source = PointSource(identifier, name, 'Non_cratonic', mfd,
                                   2, msr, 2.0, tom, 0.1, 20.0, point,
                                   nodal_plane_dist, hypo_depth_dist)
        source_list.append(point_source)
    mod_name = "Australia_Adaptive_K%i_b%.3f" % (smoother.config['k'],
                                                 smoother.config['bvalue'])
    nodes = list(map(obj_to_node, sorted(source_list)))
    # NOTE(review): the model name is `name` (the last per-cell name)
    # while `mod_name` is computed but unused -- possibly a bug; confirm
    source_model = Node("sourceModel", {"name": name}, nodes=nodes)
    with open(filename, 'wb') as f:
        nrml.write([source_model], f, '%s', xmlns=NAMESPACE)
def run_smoothing(grid_lims, smoothing_config, catalogue, completeness_table,
                  map_config, run, overwrite=True):
    """Run all the smoothing
    """
    ystart = completeness_table[-1][0]
    yend = catalogue.end_year
    catalogue_comp = deepcopy(catalogue)
    # Ensuring that catalogue is cleaned of earthquakes outside of
    # completeness period
    index = catalogue_comp.data['year'] >= ystart
    catalogue_comp.purge_catalogue(index)
    completeness_string = 'comp'
    for ym in completeness_table:
        completeness_string += '_%i_%.1f' % (ym[0], ym[1])
    # NOTE(review): `bvalue` is a free (module-level) name here -- confirm
    smoother_filename = 'Australia_Fixed_%i_%i_b%.3f_mmin_%.1f_0.1%s.csv' % (
        smoothing_config["BandWidth"], smoothing_config["Length_Limit"],
        bvalue, completeness_table[0][1], completeness_string)
    filename = smoother_filename[:-4] + '.xml'
    if os.path.exists(filename) and not overwrite:
        # FIX: Python 2 print statements converted to print() calls
        print('%s already created, not overwriting!' % filename)
        return
    smoother = SmoothedSeismicity(
        [105., 160., 0.1, -47., -5, 0.1, 0., 20., 20.],
        bvalue=smoothing_config['bvalue'])
    print('Running smoothing')
    smoothed_grid = smoother.run_analysis(
        catalogue_comp, smoothing_config,
        completeness_table=completeness_table)
    smoother.write_to_csv(smoother_filename)
    from openquake.hazardlib.nrml import SourceModelParser, write, NAMESPACE
    from openquake.baselib.node import Node
    from openquake.hazardlib import nrml
    from openquake.hazardlib.sourcewriter import obj_to_node
    # Build nrml input file of point sources
    source_list = []
    min_mag = 4.5
    max_mag = 7.8
    bval = bvalue  # just define as 1 for time being
    # Read in data again to solve number formatting issue in smoother.data
    # (it returns 0 for all a-values otherwise)
    try:
        data = np.genfromtxt(smoother_filename, delimiter=',',
                             skip_header=1)
    except ValueError:
        print('Something wrong with file %s' % smoother_filename)
        sys.exit()
    # Dummy temporal occurence model for building pt sources
    tom = PoissonTOM(50)
    msr = Leonard2014_SCR()
    for j in range(len(data[:, 4])):
        identifier = 'FSS' + str(j) + '_' + str(run)
        name = 'Frankel' + str(j) + '_' + str(run)
        point = Point(data[j, 0], data[j, 1], data[j, 2])
        # convert the total count over the learning period to a yearly rate
        annual_rate = data[j, 4] / (yend - ystart + 1)
        aval = np.log10(annual_rate) + smoothing_config[
            'bvalue'] * completeness_table[0][1]
        mfd = TruncatedGRMFD(min_mag, max_mag, 0.1, aval, bval)
        hypo_depth_dist = PMF([(0.5, 10.0), (0.25, 5.0), (0.25, 15.0)])
        nodal_plane_dist = PMF([(0.3, NodalPlane(0, 30, 90)),
                                (0.2, NodalPlane(90, 30, 90)),
                                (0.3, NodalPlane(180, 30, 90)),
                                (0.2, NodalPlane(270, 30, 90))])
        point_source = PointSource(identifier, name, 'Non_cratonic', mfd,
                                   2, msr, 2.0, tom, 0.1, 20.0, point,
                                   nodal_plane_dist, hypo_depth_dist)
        source_list.append(point_source)
    nodes = list(map(obj_to_node, sorted(source_list)))
    # NOTE(review): the model name is `name` (the last per-cell name) --
    # possibly a bug; confirm
    source_model = Node("sourceModel", {"name": name}, nodes=nodes)
    with open(filename, 'wb') as f:
        nrml.write([source_model], f, '%s', xmlns=NAMESPACE)
    # Creating a basemap - input a configuration and (if desired) a title
    title = ('Smoothed seismicity rate for learning \n'
             'period %i 2017, Mmin = %.1f') % (
        completeness_table[0][0], completeness_table[0][1])
    basemap1 = HMTKBaseMap(map_config, 'Smoothed seismicity rate')
    # Adding the smoothed grid to the basemap
    sym = (2., 3., 'cx')
    x, y = basemap1.m(smoother.data[:, 0], smoother.data[:, 1])
    basemap1.m.scatter(x, y, marker='s', c=np.log10(smoother.data[:, 4]),
                       cmap=plt.cm.coolwarm, zorder=10, lw=0,
                       vmin=-6.5, vmax=1.5)
    basemap1.m.drawcoastlines(linewidth=1, zorder=50)  # coastline on top
    # NOTE(review): meridians are drawn from the latitude limits and
    # parallels from the longitude limits -- looks swapped; confirm
    basemap1.m.drawmeridians(
        np.arange(map_config['min_lat'], map_config['max_lat'], 5))
    basemap1.m.drawparallels(
        np.arange(map_config['min_lon'], map_config['max_lon'], 5))
    plt.colorbar(label='log10(Smoothed rate per cell)')
    plt.legend()
    figname = smoother_filename[:-4] + '_smoothed_rates_map.png'
    plt.savefig(figname)
# NOTE(review): script fragment -- `i`, `dom`, `a_values`, `b_values`,
# `mmaxs`, `mmaxs_w`, `ids`, `pt`, `msr`, `tom`, `pt_ids`, `merged_pts`
# and `nrml_version` are defined outside this chunk; the first section
# appears to be the body of a loop over the shapefile records. Confirm
# against the full script.
nodal_plane_distribution = nodal_plane_dist  # FIXME! update based on data extracted from shapefile
hypocenter_distribution = hypo_depth_dist
rupture_aspect_ratio = 2
mfd = TruncatedGRMFD(min_mag, max_mag, 0.1, a_values[i], b_values[i])
# apply the domain's Mmax weighting to the GR MFD
new_mfd = gr2inc_mmax(mfd, mmaxs[dom['CODE']], mmaxs_w[dom['CODE']],
                      model_weight=1.)
mfd = new_mfd
name = 'Hall_%i' % ids[i]
source_id = name
if source_id in pt_ids:
    # FIX: Python 2 print statements converted to print() calls
    print('Point source %s already exists!' % source_id)
    print('Skipping this source for trt %s' % dom['TRT'])
else:
    pt_source = PointSource(source_id, name, dom['GMM_TRT'], mfd, 2, msr,
                            1.5, tom, 0.1, 20.0, pt, nodal_plane_dist,
                            hypo_depth_dist)
    merged_pts.append(pt_source)
    pt_ids.append(source_id)
outfile = 'Hall2007_2018.xml'
# FIX: rstrip('.xml') strips a character set, not a suffix; slicing off
# the 4-character extension gives the same result here and is not fragile
name = outfile[:-4]
if nrml_version == '04':
    nodes = list(map(obj_to_node, sorted(merged_pts)))
    source_model = Node("sourceModel", {"name": name}, nodes=nodes)
    with open(outfile, 'wb') as f:
        nrml.write([source_model], f, '%s', xmlns=NAMESPACE)
# NOTE(review): script fragment -- `j`, `data`, `min_mag`, `max_mag`,
# `bval`, `msr`, `tom`, `source_list`, `completeness_table_a` and
# `bvalue` are defined outside this chunk; the first section appears to
# be the body of a loop over j. Confirm against the full script.
identifier = 'FSS' + str(j)
name = 'Frankel' + str(j)
point = Point(data[j, 0], data[j, 1], data[j, 2])
rate = data[j, 4]
aval = np.log10(rate)
# aval = rate # trying this based on some testing
# aval = np.log10(rate) #+ bval*completeness_table_a[0][1]
# print aval
mfd = TruncatedGRMFD(min_mag, max_mag, 0.1, aval, bval)
hypo_depth_dist = PMF([(0.5, 10.0), (0.25, 5.0), (0.25, 15.0)])
nodal_plane_dist = PMF([(0.3, NodalPlane(0, 30, 90)),
                        (0.2, NodalPlane(90, 30, 90)),
                        (0.3, NodalPlane(180, 30, 90)),
                        (0.2, NodalPlane(270, 30, 90))])
point_source = PointSource(identifier, name, 'Non_cratonic', mfd, 2, msr,
                           2.0, tom, 0.1, 20.0, point, nodal_plane_dist,
                           hypo_depth_dist)
source_list.append(point_source)
# i+=1
# if j==1000:
#     break
filename = "smoothed_frankel_50_3_mmin_%.1f_b%.3f_0.1.xml" % (
    completeness_table_a[0][-1], bvalue)
mod_name = 'smoothed_frankel_50_3_mmin_%.1f_b%.3f_0.1' % (
    completeness_table_a[0][-1], bvalue)
nodes = list(map(obj_to_node, sorted(source_list)))
# NOTE(review): the model name uses `name` (the last per-cell name),
# while `mod_name` is computed but unused -- possibly a bug; confirm
source_model = Node("sourceModel", {"name": name}, nodes=nodes)
with open(filename, 'wb') as f:
    nrml.write([source_model], f, '%s', xmlns=NAMESPACE)