def plot_sources(point_source, fault_source):
    llon, ulon, llat, ulat = 105, 155, -45, -5
    map_config = {'min_lon': np.floor(llon), 'max_lon': np.ceil(ulon),
                  'min_lat': np.floor(llat), 'max_lat': np.ceil(ulat),
                  'resolution': 'i'}
    basemap1 = HMTKBaseMap(map_config, 'Point and fault sources')
    for pt_source in point_source:
        x, y = basemap1.m(pt_source.location.longitude,
                          pt_source.location.latitude)
        basemap1.m.plot(x, y, 'rs', markersize=2.0)
    for simplefault in fault_source:
        trace_lons = np.array([pnt.longitude
                               for pnt in simplefault.fault_trace.points])
        trace_lats = np.array([pnt.latitude
                               for pnt in simplefault.fault_trace.points])
        x, y = basemap1.m(trace_lons, trace_lats)
        basemap1.m.plot(x, y, 'b', linewidth=1.3)
    basemap1.savemap('Point and fault sources.png')
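# Example usage of plot_sources -- a sketch, not from the original script.
# `source_model` is assumed to be an hmtk source model already read from
# NRML (as done elsewhere in these snippets); the mtk source classes are
# the standard hmtk ones, but their use here is illustrative only.
from hmtk.sources.point_source import mtkPointSource
from hmtk.sources.simple_fault_source import mtkSimpleFaultSource

point_srcs = [src for src in source_model.sources
              if isinstance(src, mtkPointSource)]
fault_srcs = [src for src in source_model.sources
              if isinstance(src, mtkSimpleFaultSource)]
plot_sources(point_srcs, fault_srcs)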
def decluster_catalogue(catalogue, config):

    ###
    ### Catalogue cache or read/cache
    ###

    # Set up the declustering algorithm
    # Step 1 - set up the tool
    if config['decluster_method'] == 'afteran':
        decluster_method = Afteran()
    elif config['decluster_method'] == 'gardner_knopoff':
        decluster_method = GardnerKnopoffType1()
    else:
        print "invalid decluster_method configuration: use [afteran|gardner_knopoff]"
        return None

    print 'Running declustering ...'
    cluster_vector, flag_vector = decluster_method.decluster(catalogue, config)
    print 'done!'
    print '%s clusters found' % np.max(cluster_vector)
    print '%s non-Poissonian events identified' % np.sum(flag_vector != 0)

    if config['plot']:
        ###
        ### Map config
        ###
        map_dpi = 90
        add_geology = True
        add_sourcemodel = True
        savefig = False

        map_title = 'Clusters'
        #map_title = 'Brazilian Seismic Zones'
        #map_title = 'ISC-GEM Catalogue'
        #map_title = 'South-American Lithology'

        # Configure the limits of the map and the coastline resolution
        map_config = {'min_lon': -80.0, 'max_lon': -30.0,
                      'min_lat': -37.0, 'max_lat': 14.0, 'resolution': 'l'}
        #map_config = {'min_lon': -72.0, 'max_lon': -68.0, 'min_lat': -22.0, 'max_lat': -18.0, 'resolution':'l'}
        #map_config = {'min_lon': -95.0, 'max_lon': -25.0, 'min_lat': -65.0, 'max_lat': 25.0, 'resolution':'l'}

        basemap = HMTKBaseMap(map_config, map_title, dpi=map_dpi)
        #basemap.add_catalogue(catalogue, linewidth=0.2, alpha=0.1, overlay=True)

        idx = cluster_vector != 0
        x = catalogue.data['longitude'][idx]
        y = catalogue.data['latitude'][idx]
        c = cluster_vector[idx]

        basemap.add_colour_scaled_points(x, y, c, overlay=True,
                                         shape='s', alpha=0.5, size=100,
                                         linewidth=0.5, facecolor='none',
                                         cmap=plt.cm.get_cmap('Paired'))
        plt.show()

        if config['figname']:
            basemap.savemap(config['figname'])

    print 'Original catalogue had %s events' % catalogue.get_number_events()
    catalogue.select_catalogue_events(flag_vector == 0)
    print 'Purged catalogue now contains %s events' % catalogue.get_number_events()

    if config['filename']:
        writer = CsvCatalogueWriter(config['filename'])
        writer.write_file(catalogue)

    return catalogue
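# Example usage of decluster_catalogue -- a sketch, not from the original
# script. The window keys follow the hmtk declustering API, which reads
# 'time_distance_window' and 'fs_time_prop' from the same config dict that
# names the method; the output filename is hypothetical.
from hmtk.seismicity.declusterer.distance_time_windows import GardnerKnopoffWindow

decluster_config = {'decluster_method': 'gardner_knopoff',
                    'time_distance_window': GardnerKnopoffWindow(),
                    'fs_time_prop': 1.0,
                    'plot': False,
                    'figname': None,
                    'filename': 'catalogue_declustered.csv'}
declustered_catalogue = decluster_catalogue(catalogue, decluster_config)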
def run_smoothing(grid_lims, smoothing_config, catalogue, completeness_table,
                  map_config, run, overwrite=True):
    """Run all the smoothing
    """
    ystart = completeness_table[-1][0]
    yend = catalogue.end_year
    catalogue_comp = deepcopy(catalogue)
    # Ensuring that catalogue is cleaned of earthquakes outside of
    # completeness period
    index = catalogue_comp.data['year'] >= ystart
    catalogue_comp.purge_catalogue(index)

    completeness_string = 'comp'
    for ym in completeness_table:
        completeness_string += '_%i_%.1f' % (ym[0], ym[1])
    smoother_filename = 'Australia_Fixed_%i_%i_b%.3f_mmin_%.1f_0.1%s.csv' % (
        smoothing_config["BandWidth"], smoothing_config["Length_Limit"],
        smoothing_config['bvalue'], completeness_table[0][1],
        completeness_string)
    filename = smoother_filename[:-4] + '.xml'
    if os.path.exists(filename) and not overwrite:
        print '%s already created, not overwriting!' % filename
        return
    smoother = SmoothedSeismicity(
        [105., 160., 0.1, -47., -5, 0.1, 0., 20., 20.],
        bvalue=smoothing_config['bvalue'])
    print 'Running smoothing'
    smoothed_grid = smoother.run_analysis(
        catalogue_comp,
        smoothing_config,
        completeness_table=completeness_table)
    smoother.write_to_csv(smoother_filename)

    from openquake.hazardlib.nrml import SourceModelParser, write, NAMESPACE
    from openquake.baselib.node import Node
    from openquake.hazardlib import nrml
    from openquake.hazardlib.sourcewriter import obj_to_node

    # Build nrml input file of point sources
    source_list = []
    #i=0
    min_mag = 4.5
    max_mag = 7.8
    bval = smoothing_config['bvalue']
    # Read in data again to solve number formatting issue in smoother.data
    # For some reason it just returns 0 for all a values
    try:
        data = np.genfromtxt(smoother_filename, delimiter=',', skip_header=1)
    except ValueError:
        print 'Something wrong with file %s' % smoother_filename
        sys.exit()
    tom = PoissonTOM(
        50)  # Dummy temporal occurrence model for building pt sources
    msr = Leonard2014_SCR()
    for j in range(len(data[:, 4])):
        #print smoother.data[j,:]
        identifier = 'FSS' + str(j) + '_' + str(run)
        name = 'Frankel' + str(j) + '_' + str(run)
        point = Point(data[j, 0], data[j, 1], data[j, 2])
        annual_rate = data[j, 4] / (yend - ystart + 1)
        # Gutenberg-Richter: log10 N(m >= Mmin) = a - b * Mmin,
        # so a = log10(rate) + b * Mmin
        aval = np.log10(annual_rate) + smoothing_config[
            'bvalue'] * completeness_table[0][1]
        mfd = TruncatedGRMFD(min_mag, max_mag, 0.1, aval, bval)
        hypo_depth_dist = PMF([(0.5, 10.0), (0.25, 5.0), (0.25, 15.0)])
        nodal_plane_dist = PMF([(0.3, NodalPlane(0, 30, 90)),
                                (0.2, NodalPlane(90, 30, 90)),
                                (0.3, NodalPlane(180, 30, 90)),
                                (0.2, NodalPlane(270, 30, 90))])
        point_source = PointSource(identifier, name, 'Non_cratonic', mfd, 2,
                                   msr, 2.0, tom, 0.1, 20.0, point,
                                   nodal_plane_dist, hypo_depth_dist)
        source_list.append(point_source)

    nodes = list(map(obj_to_node, sorted(source_list)))
    source_model = Node("sourceModel", {"name": name}, nodes=nodes)
    with open(filename, 'wb') as f:
        nrml.write([source_model], f, '%s', xmlns=NAMESPACE)

    # Creating a basemap - input a configuration and (if desired) a title
    title = 'Smoothed seismicity rate for learning \nperiod %i 2017, Mmin = %.1f' % (
        completeness_table[0][0], completeness_table[0][1])
    basemap1 = HMTKBaseMap(map_config, 'Smoothed seismicity rate')
    # Adding the smoothed grid to the basemap
    sym = (2., 3., 'cx')
    x, y = basemap1.m(smoother.data[:, 0], smoother.data[:, 1])
    basemap1.m.scatter(x, y,
                       marker='s',
                       c=np.log10(smoother.data[:, 4]),
                       cmap=plt.cm.coolwarm,
                       zorder=10,
                       lw=0,
                       vmin=-6.5,
                       vmax=1.5)
    basemap1.m.drawcoastlines(linewidth=1, zorder=50)  # Add coastline on top
    basemap1.m.drawmeridians(
        np.arange(map_config['min_lon'], map_config['max_lon'], 5))
    basemap1.m.drawparallels(
        np.arange(map_config['min_lat'], map_config['max_lat'], 5))
    plt.colorbar(label='log10(Smoothed rate per cell)')
    plt.legend()
    figname = smoother_filename[:-4] + '_smoothed_rates_map.png'
    plt.savefig(figname)
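# Example invocation of the fixed-kernel run_smoothing -- a sketch, not from
# the original script; grid limits, completeness values and smoothing
# parameters are illustrative only, and `catalogue` is assumed to be a
# declustered hmtk catalogue.
grid_lims = [105., 160., 0.1, -47., -5., 0.1, 0., 20., 20.]
completeness_table = np.array([[1965., 4.5], [1900., 6.0]])
smoothing_config = {'BandWidth': 50, 'Length_Limit': 3,
                    'increment': 0.1, 'bvalue': 1.0}
map_config = {'min_lon': 105., 'max_lon': 160.,
              'min_lat': -47., 'max_lat': -5., 'resolution': 'i'}
run_smoothing(grid_lims, smoothing_config, catalogue,
              completeness_table, map_config, run='01')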
catalogue.sort_catalogue_chronologically()
print 'Catalogue sorted chronologically!'

# In[ ]:

# Configure the limits of the map and the coastline resolution
map_config = {'min_lon': -80.0, 'max_lon': -30.0,
              'min_lat': -37.0, 'max_lat': 14.0,
              'resolution': 'l'}

# Create a hmtk basemap
basemap1 = HMTKBaseMap(map_config, 'Earthquake Catalogue')

# Add a catalogue
basemap1.add_catalogue(catalogue)

# In[ ]:

# Limit the catalogue to the time period 1900 - 2014
valid_time = np.logical_and(catalogue.data['year'] >= 1900,
                            catalogue.data['year'] <= 2014)
catalogue.select_catalogue_events(valid_time)
plot_magnitude_time_density(catalogue, 0.2, 2.0)
print 'Catalogue now contains %s events' % catalogue.get_number_events()

# In[ ]:

# Show distribution of magnitudes with time
# Map configuration
llon, ulon, llat, ulat = source.catalogue.get_bounding_box()
#map_config = {'min_lon': np.floor(llon), 'max_lon': np.ceil(ulon),
#              'min_lat': np.floor(llat), 'max_lat': np.ceil(ulat), 'resolution':'c'}
#map_config = {'min_lon': np.floor(105), 'max_lon': np.ceil(155),
#              'min_lat': np.floor(-45), 'max_lat': np.ceil(-9), 'resolution':'c'}
map_config = {'min_lon': np.floor(100), 'max_lon': np.ceil(160),
              'min_lat': np.floor(-45), 'max_lat': np.ceil(-4),
              'resolution': 'c'}

# Creating a basemap - input a configuration and (if desired) a title
basemap1 = HMTKBaseMap(map_config, 'Smoothed seismicity rate')
basemap1.m.drawmeridians(np.arange(llon, ulon, 5))
basemap1.m.drawparallels(np.arange(llat, ulat, 5))
#print smoother.data[:,0]
#print smoother.data[:,1]
# Adding the smoothed grid to the basemap
sym = (2., 3., 'cx')
x, y = basemap1.m(smoother.data[:, 0], smoother.data[:, 1])
#print data[:,4]
basemap1.m.scatter(x, y,
                   marker='s',
                   c=(np.log10(smoother.data[:, 4]) -
                      bvalue * completeness_table_a[0][1]),
                   cmap=plt.cm.coolwarm,
                   zorder=10,
writer.write_file(catalogue)
#exit()
print 'File %s written' % output_file_name

f = open(input_catalogue_file + ".pkl", 'wb')
pickle.dump(catalogue, f)
f.close()

###
### Map
###

# Create a hmtk basemap
basemap1 = HMTKBaseMap(map_config, map_title, dpi=map_dpi)

###
### Geology
###
if add_geology:
    wms_cprm = "http://onegeology.cprm.gov.br/cgi-bin/BRA_GSB_EN_Bedrock_Geology/wms?"
    wms_oneg = "http://mapdmzrec.brgm.fr/cgi-bin/mapserv54?map=/carto/ogg/mapFiles/CGMW_Bedrock_and_Structural_Geology.map&"

    wmsl_oneg = {'server_url': wms_oneg,
                 'layers': ['World_CGMW_50M_Geology'],
                 }

    wmsl_br_blt = {'server_url': wms_cprm,
                   'layers': ['BRA_GSB_EN_1M_BLT'],
                   'styles': ['default'],
likelihood_filename = os.path.join('llh_results',
                                   smoother_filename[:-4] + '_llh.csv')
f_out = open(likelihood_filename, 'w')
line = '%.10f,%.10f,%.10f,%.10f,%.10f' % (poiss_llh, kagan_i0, kagan_i1,
                                          uniform_llh, prob_gain)
f_out.write(line)
f_out.close()

###
#sys.exit()
###

# Creating a basemap - input a configuration and (if desired) a title
title = 'Smoothed seismicity rate for learning \nperiod %i %i, K=%i, Mmin=%.1f' % (
    learning_start, learning_end, smoother.config['k'],
    smoother.config['mmin'])
basemap1 = HMTKBaseMap(map_config, title)
basemap1.m.drawmeridians(np.arange(llon, ulon, 5))
basemap1.m.drawparallels(np.arange(llat, ulat, 5))

# Adding the smoothed grid to the basemap
sym = (2., 3., 'cx')
x, y = basemap1.m(smoother.grid[:, 0], smoother.grid[:, 1])
if smoother.config['mmin'] == 3.5:
    vmax = -1.0
elif smoother.config['mmin'] == 4.0:
    vmax = -2.5
else:
    vmax = -1.0
basemap1.m.scatter(x, y,
                   marker='s',
                   c=np.log10(smoother.rates),
source_model = parser.read_file(
    "Aus Source Model 1")  # You need to supply a name for the source model

# Map configuration
llon, ulon, llat, ulat = catalogue_clean.get_bounding_box()
#map_config = {'min_lon': np.floor(llon), 'max_lon': np.ceil(ulon),
#              'min_lat': np.floor(llat), 'max_lat': np.ceil(ulat), 'resolution':'c'}
map_config = {'min_lon': np.floor(100), 'max_lon': np.ceil(160),
              'min_lat': np.floor(-45), 'max_lat': np.ceil(-4),
              'resolution': 'c'}

# Creating a basemap - input a configuration and (if desired) a title
basemap1 = HMTKBaseMap(map_config, 'Earthquake Catalogue')
# Adding the seismic sources
basemap1.add_source_model(source_model, area_border='r-',
                          border_width=1.5, alpha=0.5)

# Select catalogue from within sourcezone
selector1 = CatalogueSelector(catalogue_depth_clean, create_copy=True)
for source in source_model.sources:
    source.select_catalogue(selector1)
    llon, ulon, llat, ulat = source.catalogue.get_bounding_box()
    print llon, ulon, llat, ulat
    # Map the Source
def run_smoothing(grid_lims, config, catalogue, completeness_table,
                  map_config, run, overwrite=True):
    """Run all the smoothing
    :params config:
        Dictionary of configuration parameters.
        For more info see helmstetter_werner_2012 code and docs.
    """
    completeness_string = 'comp'
    for ym in completeness_table:
        completeness_string += '_%i_%.1f' % (ym[0], ym[1])
    smoother_filename = "Australia_Adaptive_K%i_b%.3f_mmin%.1f_%s.csv" % (
        config['k'], config['bvalue'], config['mmin'], completeness_string)
    filename = smoother_filename[:-4] + '.xml'
    if os.path.exists(filename) and not overwrite:
        print '%s already created, not overwriting!' % filename
        return
    smoother = h_w.HelmstetterEtAl2007(
        grid_lims, config, catalogue,
        storage_file=("Aus1_tmp2%.3f_%s.hdf5" % (config['bvalue'], run)))
    smoother._get_catalogue_completeness_weights(completeness_table)
    smoother.build_distance_arrays()
    smoother.build_catalogue_2_grid_array()

    # Exhaustive smoothing
    exhaustive = False
    if exhaustive:
        params, poiss_llh = smoother.exhaustive_smoothing(
            np.arange(2, 10, 1), np.arange(1.0e-6, 1.0e-5, 2.0e-6))
        print params, poiss_llh
        smoother.config["k"] = params[0]
        smoother.config["r_min"] = params[1]
        #print 'Exiting now, re-run using optimised parameters'
        #sys.exit()
    d_i = smoother.optimise_bandwidths()
    smoother.run_smoothing(config["r_min"], d_i)
    data = np.column_stack([smoother.grid, smoother.rates])
    np.savetxt(smoother_filename,
               data,
               delimiter=",",
               fmt=["%.4f", "%.4f", "%.8e"],
               header="longitude,latitude,rate")

    # Creating a basemap - input a configuration and (if desired) a title
    title = 'Smoothed seismicity rate for learning \nperiod %i %i, K=%i, Mmin=%.1f' % (
        config['learning_start'], config['learning_end'],
        smoother.config['k'], smoother.config['mmin'])
    basemap1 = HMTKBaseMap(map_config, title)
    basemap1.m.drawmeridians(
        np.arange(map_config['min_lon'], map_config['max_lon'], 5))
    basemap1.m.drawparallels(
        np.arange(map_config['min_lat'], map_config['max_lat'], 5))
    # Adding the smoothed grid to the basemap
    sym = (2., 3., 'cx')
    x, y = basemap1.m(smoother.grid[:, 0], smoother.grid[:, 1])
    if smoother.config['mmin'] == 3.5:
        vmax = -1.0
    elif smoother.config['mmin'] == 4.0:
        vmax = -2.5
    else:
        vmax = -1.0
    basemap1.m.scatter(x, y,
                       marker='s',
                       c=np.log10(smoother.rates),
                       cmap=plt.cm.coolwarm,
                       zorder=10,
                       lw=0,
                       vmin=-7.0,
                       vmax=vmax)
    basemap1.m.drawcoastlines(linewidth=1, zorder=50)  # Add coastline on top
    #basemap1.m.drawmeridians(np.arange(llat, ulat, 5))
    #basemap1.m.drawparallels(np.arange(llon, ulon, 5))
    plt.colorbar(label='Log10(Smoothed rate per cell)')
    plt.legend()
    #basemap1.m.scatter(x, y, marker='s', c=smoother.data[:,4], cmap=plt.cm.coolwarm, zorder=10)
    #basemap1.m.scatter([150],[22], marker='o')
    #basemap1.fig.show()
    #basemap1.add_catalogue(catalogue_depth_clean, overlay=False)
    figname = smoother_filename[:-4] + '_smoothed_rates_map.png'
    plt.savefig(figname)

    source_list = []
    #i=0
    min_mag = 4.5
    max_mag = 7.2
    # Read in data again to solve number formatting issue in smoother.data
    # For some reason it just returns 0 for all a values
    #data = np.genfromtxt(smoother_filename, delimiter=',', skip_header=1)
    tom = PoissonTOM(
        50)  # Dummy temporal occurrence model for building pt sources
    msr = Leonard2014_SCR()
    for j in range(len(data[:, 2])):
        identifier = 'ASS' + str(j) + '_' + str(run)
        name = 'Helmstetter' + str(j) + '_' + str(run)
        point = Point(data[j, 0], data[j, 1], 10)
        rate = data[j, 2]
        # Convert rate to a Gutenberg-Richter a-value:
        # a = log10(rate) + b * Mmin
        aval = np.log10(rate) + config['bvalue'] * config["mmin"]
        mfd = TruncatedGRMFD(min_mag, max_mag, 0.1, aval, config['bvalue'])
        hypo_depth_dist = PMF([(0.5, 10.0), (0.25, 5.0), (0.25, 15.0)])
        nodal_plane_dist = PMF([(0.3, NodalPlane(0, 30, 90)),
                                (0.2, NodalPlane(90, 30, 90)),
                                (0.3, NodalPlane(180, 30, 90)),
                                (0.2, NodalPlane(270, 30, 90))])
        point_source = PointSource(identifier, name, 'Non_cratonic', mfd, 2,
                                   msr, 2.0, tom, 0.1, 20.0, point,
                                   nodal_plane_dist, hypo_depth_dist)
        source_list.append(point_source)

    mod_name = "Australia_Adaptive_K%i_b%.3f" % (smoother.config['k'],
                                                 smoother.config['bvalue'])
    nodes = list(map(obj_to_node, sorted(source_list)))
    source_model = Node("sourceModel", {"name": name}, nodes=nodes)
    with open(filename, 'wb') as f:
        nrml.write([source_model], f, '%s', xmlns=NAMESPACE)
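# Example invocation of the adaptive run_smoothing -- a sketch, not from the
# original script. The config carries the keys this function actually reads
# (k, r_min, bvalue, mmin, learning_start, learning_end) with illustrative
# values; the real Helmstetter et al. (2007) config may require further
# entries, and `catalogue`, `completeness_table` and `map_config` are
# assumed to be defined as in the fixed-kernel example above.
grid_lims = [105., 160., 0.1, -47., -5., 0.1, 0., 20., 20.]
adaptive_config = {'k': 3, 'r_min': 5.0e-6, 'bvalue': 1.0, 'mmin': 3.5,
                   'learning_start': 1965, 'learning_end': 2017}
run_smoothing(grid_lims, adaptive_config, catalogue, completeness_table,
              map_config, run='01')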
# In[ ]:

# Sort catalogue chronologically
catalogue.sort_catalogue_chronologically()
print 'Catalogue sorted chronologically!'

# In[ ]:

# Configure the limits of the map and the coastline resolution
map_config = {'min_lon': -80.0, 'max_lon': -30.0,
              'min_lat': -37.0, 'max_lat': 14.0, 'resolution': 'l'}

# Create a hmtk basemap
basemap1 = HMTKBaseMap(map_config, 'Earthquake Catalogue')

# Add a catalogue
basemap1.add_catalogue(catalogue)

# In[ ]:

# Limit the catalogue to the time period 1900 - 2014
valid_time = np.logical_and(catalogue.data['year'] >= 1900,
                            catalogue.data['year'] <= 2014)
catalogue.select_catalogue_events(valid_time)
plot_magnitude_time_density(catalogue, 0.2, 2.0)
print 'Catalogue now contains %s events' % catalogue.get_number_events()

# In[ ]:
parser = nrmlSourceModelParser(area_source_file)
area_model = parser.read_file()

# <codecell>

# Configure the limits of the map and the coastline resolution
map_config = {'min_lon': -80.0, 'max_lon': -30.0,
              'min_lat': -37.0, 'max_lat': 14.0,
              'resolution': 'l'}

# Create a hmtk basemap
basemap1 = HMTKBaseMap(map_config, 'Source Models')
# Add fault sources
#basemap1.add_source_model(fault_model, overlay=True)
# Add area sources
basemap1.add_source_model(area_model, area_border='b-')

# <codecell>

# Load in the catalogue
from hmtk.parsers.catalogue.csv_catalogue_parser import CsvCatalogueParser
input_file = 'data_input/hmtk_bsb2013.csv'
parser = CsvCatalogueParser(input_file)
catalogue = parser.read_file()
print 'Input complete: %s events in catalogue' % catalogue.get_number_events()
print 'Catalogue Covers the Period: %s to %s' % (catalogue.start_year,
                                                 catalogue.end_year)
# <codecell>

# Import an Area Source Model
area_source_file = 'snippets/s03.xml'
parser = nrmlSourceModelParser(area_source_file)
area_model = parser.read_file()

# <codecell>

# Configure the limits of the map and the coastline resolution
map_config = {'min_lon': -80.0, 'max_lon': -30.0,
              'min_lat': -37.0, 'max_lat': 14.0, 'resolution': 'l'}

# Create a hmtk basemap
basemap1 = HMTKBaseMap(map_config, 'Source Models')
# Add fault sources
#basemap1.add_source_model(fault_model, overlay=True)
# Add area sources
basemap1.add_source_model(area_model, area_border='b-')

# <codecell>

# Load in the catalogue
from hmtk.parsers.catalogue.csv_catalogue_parser import CsvCatalogueParser
input_file = 'data_input/hmtk_bsb2013.csv'
parser = CsvCatalogueParser(input_file)
catalogue = parser.read_file()
print 'Input complete: %s events in catalogue' % catalogue.get_number_events()
print 'Catalogue Covers the Period: %s to %s' % (catalogue.start_year,
                                                 catalogue.end_year)
import numpy as np
import matplotlib.pyplot as plt
plt.xkcd()
from hmtk.plotting.mapping import HMTKBaseMap

dpi = 90
map_config = {'min_lon': -80.0, 'max_lon': -30.0,
              'min_lat': -37.0, 'max_lat': 14.0,
              'resolution': 'l'}
basemap1 = HMTKBaseMap(map_config, '\gls{bsb2013} helmstetter2012 catalogues',
                       dpi=dpi)

X = np.genfromtxt('cat', skip_header=True)
x = X[:, 1]
y = X[:, 2]
z = X[:, 3]
#print min(z), max(z)
basemap1.add_size_scaled_points(y, x, z,
                                alpha=0.3,
                                colour='k',
                                smin=0.5,
                                sscale=2,
                                facecolor='none',
                                overlay=True,
                                label='learning')

Y = np.genfromtxt('TARG', skip_header=True)
#print Y[:10]
x = Y[:, 1]
y = Y[:, 2]
z = Y[:, 3]