def nrml_from_shapefile(shapefile,
                        shapefile_faultname_attribute,
                        shapefile_dip_attribute,
                        shapefile_sliprate_attribute,
                        source_model_name,
                        simple_fault_tectonic_region,
                        magnitude_scaling_relation,
                        rupture_aspect_ratio,
                        upper_depth,
                        lower_depth,
                        a_value,
                        b_value,
                        min_mag,
                        max_mag,
                        rake,
                        output_dir,
                        incremental_mfd,
                        shapefile_uplift_attribute=None,
                        quiet=True):
    """Driver routine to convert a fault shapefile to an NRML source model
    """
    # Get geometry
    fault_traces, faultnames, dips, \
        sliprate, fault_lengths = parse_line_shapefile(
            shapefile,
            shapefile_faultname_attribute,
            shapefile_dip_attribute,
            shapefile_sliprate_attribute,
            shapefile_uplift_attribute)

    # Output is written line-by-line to this list
    output_xml = []
    append_xml_header(output_xml, source_model_name)

    # If b-value is not given, take it from the Leonard 2008 model
    region_shapefile = '../zones/Leonard2008/shapefiles/LEONARD08_NSHA18_MFD.shp'
    if b_value is None:
        b_value = b_value_from_region(fault_traces, region_shapefile)

    # If tectonic region type is not given, take it from the Domains model
    domains_shapefile = '../zones/Domains/shapefiles/DOMAINS_NSHA18.shp'
    if simple_fault_tectonic_region is None:
        simple_fault_tectonic_region = trt_from_domains(fault_traces,
                                                        domains_shapefile)

    # Loop through each fault and add source-specific info
    for i in range(len(fault_traces)):
        # Skip faults with zero or null slip rate
        if sliprate[i] == "" or sliprate[i] == 0:
            continue
        simple_fault_id = i
        # Fault area (km^2) from trace length and seismogenic thickness
        A = fault_lengths[i] * (float(lower_depth) - float(upper_depth))
        # Calculate M_max from scaling relations
        scalrel = Leonard2014_SCR()
        bin_width = 0.1
        max_mag = scalrel.get_median_mag(A, float(rake))
        char_mag = max_mag - 0.25  # characteristic magnitude for OQ definition
        # Calculate characteristic incremental occurrence rates from slip rate
        if sliprate[i] != '""':
            if not quiet:
                print(sliprate[i])
            # slip2GR is called here only to obtain the moment rate; the
            # a-value it returns is discarded below. This needs fixing.
            a_value, moment_rate = fault_slip_rate_GR_conversion.slip2GR(
                sliprate[i], A, float(b_value[i]), float(max_mag),
                M_min=0.0)
            a_value = None  # We aren't using the a-value
        append_rupture_geometry(output_xml, fault_traces[i], dips[i],
                                simple_fault_id, faultnames[i],
                                upper_depth, lower_depth,
                                simple_fault_tectonic_region[i])
        if incremental_mfd:
            append_earthquake_information_inc(
                output_xml, magnitude_scaling_relation,
                rupture_aspect_ratio, char_mag, b_value[i],
                min_mag, max_mag, rake, moment_rate, bin_width)
        else:
            append_earthquake_information_YC(
                output_xml, magnitude_scaling_relation,
                rupture_aspect_ratio, char_mag, b_value[i],
                min_mag, max_mag, rake, moment_rate, bin_width)

    # Close xml
    output_xml.append('  </sourceModel>')
    output_xml.append('</nrml>')

    # Add newlines
    output_xml = [oxml + '\n' for oxml in output_xml]

    return output_xml
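# A minimal standalone sketch of the slip-rate to moment-rate conversion
# that fault_slip_rate_GR_conversion.slip2GR is assumed to perform
# internally (moment_rate = mu * A * s, after Brune, 1968). The unit
# handling assumes A in km^2 and slip rate in mm/yr, matching the caller
# above; the helper name is hypothetical and for illustration only.
def approx_moment_rate(fault_area_sqkm, sliprate_mm_yr, mu=3.0e10):
    """Return an approximate seismic moment rate in N-m/yr."""
    area_m2 = fault_area_sqkm * 1.0e6    # km^2 -> m^2
    slip_m_yr = sliprate_mm_yr * 1.0e-3  # mm/yr -> m/yr
    return mu * area_m2 * slip_m_yr

# e.g. a 50 km x 15 km fault slipping at 0.05 mm/yr:
# approx_moment_rate(750., 0.05) -> ~1.1e15 N-m/yr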
def run_smoothing(grid_lims, smoothing_config, catalogue, completeness_table,
                  map_config, run, overwrite=True):
    """Run all the smoothing
    """
    ystart = completeness_table[-1][0]
    yend = catalogue.end_year
    catalogue_comp = deepcopy(catalogue)
    # Ensure that the catalogue is cleaned of earthquakes outside of
    # the completeness period
    index = catalogue_comp.data['year'] >= ystart
    catalogue_comp.purge_catalogue(index)

    completeness_string = 'comp'
    for ym in completeness_table:
        completeness_string += '_%i_%.1f' % (ym[0], ym[1])
    bvalue = smoothing_config['bvalue']
    smoother_filename = 'Australia_Fixed_%i_%i_b%.3f_mmin_%.1f_0.1%s.csv' % (
        smoothing_config["BandWidth"], smoothing_config["Length_Limit"],
        bvalue, completeness_table[0][1], completeness_string)
    filename = smoother_filename[:-4] + '.xml'
    if os.path.exists(filename) and not overwrite:
        print('%s already created, not overwriting!' % filename)
        return

    smoother = SmoothedSeismicity(
        [105., 160., 0.1, -47., -5, 0.1, 0., 20., 20.],
        bvalue=bvalue)
    print('Running smoothing')
    smoothed_grid = smoother.run_analysis(
        catalogue_comp, smoothing_config,
        completeness_table=completeness_table)
    smoother.write_to_csv(smoother_filename)

    from openquake.hazardlib.nrml import NAMESPACE
    from openquake.baselib.node import Node
    from openquake.hazardlib import nrml
    from openquake.hazardlib.sourcewriter import obj_to_node

    # Build nrml input file of point sources
    source_list = []
    min_mag = 4.5
    max_mag = 7.8
    bval = bvalue
    # Read the data back in to solve a number formatting issue in
    # smoother.data, which otherwise returns 0 for all a-values
    try:
        data = np.genfromtxt(smoother_filename, delimiter=',', skip_header=1)
    except ValueError:
        print('Something wrong with file %s' % smoother_filename)
        sys.exit()
    tom = PoissonTOM(50)  # Dummy temporal occurrence model for building pt sources
    msr = Leonard2014_SCR()
    for j in range(len(data[:, 4])):
        identifier = 'FSS' + str(j) + '_' + str(run)
        name = 'Frankel' + str(j) + '_' + str(run)
        point = Point(data[j, 0], data[j, 1], data[j, 2])
        annual_rate = data[j, 4] / (yend - ystart + 1)
        # Convert the annual rate above the completeness magnitude to a
        # Gutenberg-Richter a-value: a = log10(N(m)) + b*m
        aval = np.log10(annual_rate) + \
            smoothing_config['bvalue'] * completeness_table[0][1]
        mfd = TruncatedGRMFD(min_mag, max_mag, 0.1, aval, bval)
        hypo_depth_dist = PMF([(0.5, 10.0),
                               (0.25, 5.0),
                               (0.25, 15.0)])
        nodal_plane_dist = PMF([(0.3, NodalPlane(0, 30, 90)),
                                (0.2, NodalPlane(90, 30, 90)),
                                (0.3, NodalPlane(180, 30, 90)),
                                (0.2, NodalPlane(270, 30, 90))])
        point_source = PointSource(identifier, name, 'Non_cratonic',
                                   mfd, 2, msr, 2.0, tom, 0.1, 20.0,
                                   point, nodal_plane_dist, hypo_depth_dist)
        source_list.append(point_source)

    nodes = list(map(obj_to_node, sorted(source_list)))
    source_model = Node("sourceModel", {"name": name}, nodes=nodes)
    with open(filename, 'wb') as f:
        nrml.write([source_model], f, '%s', xmlns=NAMESPACE)

    # Create a basemap - input a configuration and (if desired) a title
    title = 'Smoothed seismicity rate for learning \nperiod %i 2017, Mmin = %.1f' % (
        completeness_table[0][0], completeness_table[0][1])
    basemap1 = HMTKBaseMap(map_config, title)
    # Add the smoothed grid to the basemap
    sym = (2., 3., 'cx')
    x, y = basemap1.m(smoother.data[:, 0], smoother.data[:, 1])
    basemap1.m.scatter(x, y, marker='s', c=np.log10(smoother.data[:, 4]),
                       cmap=plt.cm.coolwarm, zorder=10, lw=0,
                       vmin=-6.5, vmax=1.5)
    basemap1.m.drawcoastlines(linewidth=1, zorder=50)  # Add coastline on top
    # Meridians are lines of constant longitude and parallels of constant
    # latitude, so each takes the matching limits from map_config
    basemap1.m.drawmeridians(
        np.arange(map_config['min_lon'], map_config['max_lon'], 5))
    basemap1.m.drawparallels(
        np.arange(map_config['min_lat'], map_config['max_lat'], 5))
    plt.colorbar(label='log10(Smoothed rate per cell)')
    plt.legend()
    figname = smoother_filename[:-4] + '_smoothed_rates_map.png'
    plt.savefig(figname)
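# A minimal check of the a-value conversion used above, assuming the
# smoothed value is an annual rate of events with m >= mmin and the
# Gutenberg-Richter relation log10(N(>= m)) = a - b*m. The numbers are
# illustrative, not taken from the model.
import numpy as np

annual_rate = 2.0e-3  # hypothetical smoothed annual rate above mmin
b = 1.0
mmin = 3.5            # completeness magnitude, completeness_table[0][1]
aval = np.log10(annual_rate) + b * mmin
# Inverting the GR relation at mmin recovers the input rate
assert np.isclose(10 ** (aval - b * mmin), annual_rate)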
from openquake.hazardlib import nrml
from openquake.hazardlib.sourcewriter import obj_to_node

# Build nrml input file of point sources
source_list = []
min_mag = 4.5
max_mag = 7.8
bval = bvalue  # just define as 1 for the time being
# Read the data back in to solve a number formatting issue in
# smoother.data, which otherwise returns 0 for all a-values
data = np.genfromtxt(smoother_filename, delimiter=',', skip_header=1)
tom = PoissonTOM(50)  # Dummy temporal occurrence model for building pt sources
msr = Leonard2014_SCR()
for j in range(len(data[:, 4])):
    identifier = 'FSS' + str(j)
    name = 'Frankel' + str(j)
    point = Point(data[j, 0], data[j, 1], data[j, 2])
    rate = data[j, 4]
    aval = np.log10(rate)
    # aval = rate  # trying this based on some testing
    # aval = np.log10(rate) + bval * completeness_table_a[0][1]
    mfd = TruncatedGRMFD(min_mag, max_mag, 0.1, aval, bval)
    hypo_depth_dist = PMF([(0.5, 10.0),
                           (0.25, 5.0),
                           (0.25, 15.0)])
    nodal_plane_dist = PMF([(0.3, NodalPlane(0, 30, 90)),
                            (0.2, NodalPlane(90, 30, 90)),
                            (0.3, NodalPlane(180, 30, 90)),
                            (0.2, NodalPlane(270, 30, 90))])
    point_source = PointSource(identifier, name, 'Non_cratonic',
                               mfd, 2, msr, 2.0, tom, 0.1, 20.0,
                               point, nodal_plane_dist, hypo_depth_dist)
    source_list.append(point_source)
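# A short sanity-check sketch for the MFDs constructed above:
# TruncatedGRMFD.get_annual_occurrence_rates() (openquake.hazardlib)
# returns (magnitude, annual rate) pairs per bin, so very low or zero
# rates from a bad a-value show up immediately. Values are illustrative.
from openquake.hazardlib.mfd import TruncatedGRMFD

mfd_check = TruncatedGRMFD(min_mag=4.5, max_mag=7.8, bin_width=0.1,
                           a_val=1.0, b_val=1.0)
for mag, occ_rate in mfd_check.get_annual_occurrence_rates():
    print('M%.2f: %.3e events/yr' % (mag, occ_rate))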
def nrml_from_shapefile(shapefile,
                        shapefile_faultname_attribute,
                        shapefile_dip_attribute,
                        shapefile_sliprate_attribute,
                        source_model_name,
                        simple_fault_tectonic_region,
                        magnitude_scaling_relation,
                        rupture_aspect_ratio,
                        upper_depth,
                        lower_depth,
                        a_value,
                        b_value,
                        min_mag,
                        max_mag,
                        rake,
                        output_dir,
                        shapefile_uplift_attribute=None,
                        quiet=True):
    """Driver routine to convert a fault shapefile to an NRML source model
    """
    # Get geometry
    fault_traces, faultnames, dips, \
        sliprate, fault_lengths = parse_line_shapefile(
            shapefile,
            shapefile_faultname_attribute,
            shapefile_dip_attribute,
            shapefile_sliprate_attribute,
            shapefile_uplift_attribute)

    # Output is written line-by-line to this list
    output_xml = []
    append_xml_header(output_xml, source_model_name)

    # If b-value is not given, take it from the Leonard 2008 model
    region_shapefile = '../zones/Leonard2008/shapefiles/LEONARD08_NSHA18_MFD.shp'
    if b_value is None:
        b_value = b_value_from_region(fault_traces, region_shapefile)

    # If tectonic region type is not given, take it from the Domains model
    domains_shapefile = '../zones/Domains/shapefiles/DOMAINS_NSHA18.shp'
    if simple_fault_tectonic_region is None:
        simple_fault_tectonic_region = trt_from_domains(fault_traces,
                                                        domains_shapefile)

    # Loop through each fault and add source-specific info
    for i in range(len(fault_traces)):
        # Skip faults with zero or null slip rate
        if sliprate[i] == "" or sliprate[i] == 0:
            continue
        simple_fault_id = i
        # Fault area (km^2) from trace length and seismogenic thickness
        A = fault_lengths[i] * (float(lower_depth) - float(upper_depth))
        # Calculate M_max from scaling relations
        scalrel = Leonard2014_SCR()
        max_mag = scalrel.get_median_mag(A, float(rake))
        # Calculate GR a-value from slip rate
        if sliprate[i] != '""':
            a_value, moment_rate = fault_slip_rate_GR_conversion.slip2GR(
                sliprate[i], A, float(b_value[i]), float(max_mag),
                M_min=0.0)
        append_rupture_geometry(output_xml, fault_traces[i], dips[i],
                                simple_fault_id, faultnames[i],
                                upper_depth, lower_depth,
                                simple_fault_tectonic_region[i])
        append_earthquake_information(output_xml,
                                      magnitude_scaling_relation,
                                      rupture_aspect_ratio, a_value,
                                      b_value[i], min_mag, max_mag, rake)

    # Close xml
    output_xml.append('  </sourceModel>')
    output_xml.append('</nrml>')

    # Add newlines
    output_xml = [oxml + '\n' for oxml in output_xml]

    return output_xml
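# A hedged usage sketch for the driver above. The shapefile path and
# attribute names are hypothetical placeholders; passing None for the
# tectonic region and b-value lets the function derive them from the
# Leonard 2008 and Domains models, as in the body above.
output_xml = nrml_from_shapefile(
    'faults.shp',                # hypothetical input shapefile
    'NAME', 'DIP', 'SLIP_RATE',  # hypothetical attribute names
    'example_fault_model',
    None,                        # TRT derived from Domains model
    'Leonard2014_SCR',           # magnitude scaling relation
    1.5,                         # rupture aspect ratio
    0.0, 20.0,                   # upper/lower seismogenic depth (km)
    None, None,                  # a- and b-values derived if None
    4.5, 7.5,                    # min/max magnitude (max is recalculated)
    90,                          # rake
    '.')                         # output directory
with open('example_fault_model.xml', 'w') as f:
    f.writelines(output_xml)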
def run_smoothing(grid_lims, config, catalogue, completeness_table,
                  map_config, run, overwrite=True):
    """Run all the smoothing

    :params config:
        Dictionary of configuration parameters. For more info see
        helmstetter_werner_2012 code and docs.
    """
    completeness_string = 'comp'
    for ym in completeness_table:
        completeness_string += '_%i_%.1f' % (ym[0], ym[1])
    smoother_filename = "Australia_Adaptive_K%i_b%.3f_mmin%.1f_%s.csv" % (
        config['k'], config['bvalue'], config['mmin'], completeness_string)
    filename = smoother_filename[:-4] + '.xml'
    if os.path.exists(filename) and not overwrite:
        print('%s already created, not overwriting!' % filename)
        return

    smoother = h_w.HelmstetterEtAl2007(
        grid_lims, config, catalogue,
        storage_file=("Aus1_tmp2%.3f_%s.hdf5" % (config['bvalue'], run)))
    smoother._get_catalogue_completeness_weights(completeness_table)
    smoother.build_distance_arrays()
    smoother.build_catalogue_2_grid_array()

    # Optional exhaustive search for optimal smoothing parameters
    exhaustive = False
    if exhaustive:
        params, poiss_llh = smoother.exhaustive_smoothing(
            np.arange(2, 10, 1), np.arange(1.0e-6, 1.0e-5, 2.0e-6))
        print(params, poiss_llh)
        smoother.config["k"] = params[0]
        smoother.config["r_min"] = params[1]

    d_i = smoother.optimise_bandwidths()
    smoother.run_smoothing(config["r_min"], d_i)
    data = np.column_stack([smoother.grid, smoother.rates])
    np.savetxt(smoother_filename,
               data,
               delimiter=",",
               fmt=["%.4f", "%.4f", "%.8e"],
               header="longitude,latitude,rate")

    # Create a basemap - input a configuration and (if desired) a title
    title = 'Smoothed seismicity rate for learning \nperiod %i %i, K=%i, Mmin=%.1f' % (
        config['learning_start'], config['learning_end'],
        smoother.config['k'], smoother.config['mmin'])
    basemap1 = HMTKBaseMap(map_config, title)
    # Meridians are lines of constant longitude and parallels of constant
    # latitude, so each takes the matching limits from map_config
    basemap1.m.drawmeridians(
        np.arange(map_config['min_lon'], map_config['max_lon'], 5))
    basemap1.m.drawparallels(
        np.arange(map_config['min_lat'], map_config['max_lat'], 5))
    # Add the smoothed grid to the basemap
    sym = (2., 3., 'cx')
    x, y = basemap1.m(smoother.grid[:, 0], smoother.grid[:, 1])
    if smoother.config['mmin'] == 3.5:
        vmax = -1.0
    elif smoother.config['mmin'] == 4.0:
        vmax = -2.5
    else:
        vmax = -1.0
    basemap1.m.scatter(x, y, marker='s', c=np.log10(smoother.rates),
                       cmap=plt.cm.coolwarm, zorder=10, lw=0,
                       vmin=-7.0, vmax=vmax)
    basemap1.m.drawcoastlines(linewidth=1, zorder=50)  # Add coastline on top
    plt.colorbar(label='Log10(Smoothed rate per cell)')
    plt.legend()
    figname = smoother_filename[:-4] + '_smoothed_rates_map.png'
    plt.savefig(figname)

    # Build point sources from the smoothed rates
    source_list = []
    min_mag = 4.5
    max_mag = 7.2
    tom = PoissonTOM(50)  # Dummy temporal occurrence model for building pt sources
    msr = Leonard2014_SCR()
    for j in range(len(data[:, 2])):
        identifier = 'ASS' + str(j) + '_' + str(run)
        name = 'Helmstetter' + str(j) + '_' + str(run)
        point = Point(data[j, 0], data[j, 1], 10)
        rate = data[j, 2]
        # Convert rate to a Gutenberg-Richter a-value: a = log10(N(m)) + b*m
        aval = np.log10(rate) + config['bvalue'] * config["mmin"]
        mfd = TruncatedGRMFD(min_mag, max_mag, 0.1, aval, config['bvalue'])
        hypo_depth_dist = PMF([(0.5, 10.0),
                               (0.25, 5.0),
                               (0.25, 15.0)])
        nodal_plane_dist = PMF([(0.3, NodalPlane(0, 30, 90)),
                                (0.2, NodalPlane(90, 30, 90)),
                                (0.3, NodalPlane(180, 30, 90)),
                                (0.2, NodalPlane(270, 30, 90))])
        point_source = PointSource(identifier, name, 'Non_cratonic',
                                   mfd, 2, msr, 2.0, tom, 0.1, 20.0,
                                   point, nodal_plane_dist, hypo_depth_dist)
        source_list.append(point_source)

    mod_name = "Australia_Adaptive_K%i_b%.3f" % (smoother.config['k'],
                                                 smoother.config['bvalue'])
    nodes = list(map(obj_to_node, sorted(source_list)))
    # Name the source model after the model, not the last point source
    source_model = Node("sourceModel", {"name": mod_name}, nodes=nodes)
    with open(filename, 'wb') as f:
        nrml.write([source_model], f, '%s', xmlns=NAMESPACE)
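# A hedged sketch of the configuration dictionaries the function above
# expects. Key names are those read in the code (config['k'],
# config['bvalue'], map_config['min_lon'], ...); the values are
# illustrative only, and HMTKBaseMap may require further map_config
# keys (e.g. 'resolution') not shown here.
config = {'k': 3,                  # number of nearest neighbours
          'r_min': 1.0e-6,         # minimum kernel bandwidth
          'bvalue': 1.0,
          'mmin': 3.5,
          'learning_start': 1965,  # learning period, used in the map title
          'learning_end': 2010}
map_config = {'min_lon': 105., 'max_lon': 160.,
              'min_lat': -47., 'max_lat': -5.}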
shp2nrml.append_xml_header(output_xml_geom,
                           ('%s_geom_filtered' % source_model_name))
shp2nrml.append_xml_header(output_xml_all_methods,
                           ('%s_all_methods_collapsed' % source_model_name))
for i, fault_trace in enumerate(fault_traces):
    # Get basic parameters
    fault_area = fault_lengths[i] * (float(lower_depth) - float(upper_depth))
    sliprate = sliprates[i]
    trt = trts[i]
    faultname = faultnames[i]
    b_value = b_values[i]
    dip = dips[i]
    print('Calculating rates for %s in domain %s' % (faultname, trt))
    # Calculate M_max from scaling relations
    scalrel = Leonard2014_SCR()
    max_mag = scalrel.get_median_mag(fault_area, float(rake))
    # Snap max_mag to the centre of a 0.1 magnitude bin (nearest x.x5 value)
    max_mag = np.round((max_mag - 0.05), 1) + 0.05
    print('Maximum magnitude is %.3f' % max_mag)
    # Append geometry information
    for output_xml in output_xmls:
        shp2nrml.append_rupture_geometry(output_xml, fault_trace,
                                         dip, i, faultname,
                                         upper_depth, lower_depth,
                                         trt)
    # Get truncated Gutenberg-Richter rates
    gr_mags, gr_rates, moment_rate = \
        shp2nrml.sliprate2GR_incremental(sliprate, fault_area,
                                         b_value, max_mag)
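# A small worked check of the max_mag snapping above: subtracting 0.05,
# rounding to one decimal, then adding 0.05 back snaps the magnitude to
# the centre of a 0.1-wide bin (..., 7.15, 7.25, ...), keeping it
# aligned with the incremental MFD bins. Inputs are illustrative.
import numpy as np

for m in (7.19, 7.23, 7.31):
    snapped = np.round((m - 0.05), 1) + 0.05
    print('%.2f -> %.2f' % (m, snapped))
# 7.19 -> 7.15, 7.23 -> 7.25, 7.31 -> 7.35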