except OSError:
    pass

config = {"k": 4,
          "r_min": 1.0E-6,
          "bvalue": bvalue,
          "mmin": 3.0,
          "learning_start": 1965,
          "learning_end": 2009,
          "target_start": 2009,
          "target_end": 2010}
smoother = h_w.HelmstetterEtAl2007(
    grid_lims, config, source.catalogue,
    storage_file=("Aus1_tmp2%.3f.hdf5" % bvalue))
smoother._get_catalogue_completeness_weights(completeness_table_a)
smoother.build_distance_arrays()
smoother.build_catalogue_2_grid_array()
# Exhaustive smoothing: optional grid search over k and r_min
exhaustive = False
if exhaustive:
    params, poiss_llh = smoother.exhaustive_smoothing(
        np.arange(3, 6, 1), np.arange(1.0e-6, 1.0e-5, 2.0e-6))
    print(params, poiss_llh)
    smoother.config["k"] = params[0]
    smoother.config["r_min"] = params[1]
d_i = smoother.optimise_bandwidths()
os.remove("Aus1_tmp2.hdf5") except OSError: pass config = { "k": 3, "r_min": 1.0E-7, "bvalue": 1.0, "mmin": 3.0, "learning_start": 1990, "learning_end": 2003, "target_start": 2004, "target_end": 2013 } smoother = h_w.HelmstetterEtAl2007(grid_lims, config, source.catalogue, storage_file="Aus1_tmp2.hdf5") smoother._get_catalogue_completeness_weights(completeness_table_a) smoother.build_distance_arrays() smoother.build_catalogue_2_grid_array() # Exhaustive smoothing params, poiss_llh = smoother.exhaustive_smoothing( np.arange(3, 6, 1), np.arange(1.0e-6, 1.0e-5, 2.0e-6)) print params, poiss_llh smoother.config["k"] = params[0] smoother.config["r_min"] = params[1] d_i = smoother.optimise_bandwidths() smoother.run_smoothing(config["r_min"], d_i) np.savetxt("Australia_Adaptive_K3.csv", np.column_stack([smoother.grid, smoother.rates]),
def run_smoothing(grid_lims, config, catalogue, completeness_table, map_config, run):
    """Run all the smoothing

    :param config:
        Dictionary of configuration parameters. For more information see the
        helmstetter_werner_2012 code and docs.
    """
    completeness_string = 'comp'
    for ym in completeness_table:
        completeness_string += '_%i_%.1f' % (ym[0], ym[1])
    smoother_filename = "Australia_Adaptive_K%i_b%.3f_mmin%.1f_%s.csv" % (
        config['k'], config['bvalue'], config['mmin'], completeness_string)
    filename = smoother_filename[:-4] + '.xml'
    if os.path.exists(filename) and not overwrite:
        print('%s already created, not overwriting!' % filename)
        return

    smoother = h_w.HelmstetterEtAl2007(
        grid_lims, config, catalogue,
        storage_file=("Aus1_tmp2%.3f_%s.hdf5" % (config['bvalue'], run)))
    smoother._get_catalogue_completeness_weights(completeness_table)
    smoother.build_distance_arrays()
    smoother.build_catalogue_2_grid_array()
    # Exhaustive smoothing: optional grid search over k and r_min
    exhaustive = False
    if exhaustive:
        params, poiss_llh = smoother.exhaustive_smoothing(
            np.arange(2, 10, 1), np.arange(1.0e-6, 1.0e-5, 2.0e-6))
        print(params, poiss_llh)
        smoother.config["k"] = params[0]
        smoother.config["r_min"] = params[1]
        # print('Exiting now, re-run using optimised parameters')
        # sys.exit()
    d_i = smoother.optimise_bandwidths()
    smoother.run_smoothing(config["r_min"], d_i)
    data = np.column_stack([smoother.grid, smoother.rates])
    np.savetxt(smoother_filename,
               data,
               delimiter=",",
               fmt=["%.4f", "%.4f", "%.8e"],
               header="longitude,latitude,rate")

    # Create a basemap - input a configuration and (if desired) a title
    title = 'Smoothed seismicity rate for learning \nperiod %i-%i, K=%i, Mmin=%.1f' % (
        config['learning_start'], config['learning_end'],
        smoother.config['k'], smoother.config['mmin'])
    basemap1 = HMTKBaseMap(map_config, title)
    basemap1.m.drawmeridians(
        np.arange(map_config['min_lon'], map_config['max_lon'], 5))
    basemap1.m.drawparallels(
        np.arange(map_config['min_lat'], map_config['max_lat'], 5))
    # Add the smoothed grid to the basemap
    x, y = basemap1.m(smoother.grid[:, 0], smoother.grid[:, 1])
    if smoother.config['mmin'] == 3.5:
        vmax = -1.0
    elif smoother.config['mmin'] == 4.0:
        vmax = -2.5
    else:
        vmax = -1.0
    basemap1.m.scatter(x, y, marker='s', c=np.log10(smoother.rates),
                       cmap=plt.cm.coolwarm, zorder=10, lw=0,
                       vmin=-7.0, vmax=vmax)
    basemap1.m.drawcoastlines(linewidth=1, zorder=50)  # Add coastline on top
    plt.colorbar(label='Log10(Smoothed rate per cell)')
    plt.legend()
    figname = smoother_filename[:-4] + '_smoothed_rates_map.png'
    plt.savefig(figname)

    # Build point sources from the smoothed rates
    source_list = []
    min_mag = 4.5
    max_mag = 7.2
    # If the rates in smoother.data are mis-formatted (returning 0 for all
    # a-values), they can be re-read from the CSV written above:
    # data = np.genfromtxt(smoother_filename, delimiter=',', skip_header=1)
    # Dummy temporal occurrence model for building point sources
    tom = PoissonTOM(50)
    msr = Leonard2014_SCR()
    for j in range(len(data[:, 2])):
        identifier = 'ASS' + str(j) + '_' + str(run)
        name = 'Helmstetter' + str(j) + '_' + str(run)
        point = Point(data[j, 0], data[j, 1], 10)
        rate = data[j, 2]
        # Convert the rate of M >= mmin events to a Gutenberg-Richter a-value
        aval = np.log10(rate) + config['bvalue'] * config["mmin"]
        mfd = TruncatedGRMFD(min_mag, max_mag, 0.1, aval, config['bvalue'])
        hypo_depth_dist = PMF([(0.5, 10.0), (0.25, 5.0), (0.25, 15.0)])
        nodal_plane_dist = PMF([(0.3, NodalPlane(0, 30, 90)),
                                (0.2, NodalPlane(90, 30, 90)),
                                (0.3, NodalPlane(180, 30, 90)),
                                (0.2, NodalPlane(270, 30, 90))])
        point_source = PointSource(identifier, name, 'Non_cratonic',
                                   mfd, 2, msr, 2.0, tom, 0.1, 20.0,
                                   point, nodal_plane_dist, hypo_depth_dist)
        source_list.append(point_source)

    mod_name = "Australia_Adaptive_K%i_b%.3f" % (smoother.config['k'],
                                                 smoother.config['bvalue'])
    nodes = list(map(obj_to_node, sorted(source_list)))
    source_model = Node("sourceModel", {"name": mod_name}, nodes=nodes)
    with open(filename, 'wb') as f:
        nrml.write([source_model], f, '%s', xmlns=NAMESPACE)
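# Sanity check of the rate-to-a-value conversion used in run_smoothing above.
# Gutenberg-Richter gives log10 N(>= m) = a - b*m; with the smoothed cell rate
# taken as N(>= mmin), the a-value is log10(rate) + b*mmin. The numbers below
# are illustrative placeholders, not values from the model.
import numpy as np

rate = 0.002                 # assumed annual rate of M >= mmin events in one cell
bvalue, mmin = 1.0, 3.0      # assumed b-value and minimum magnitude
aval = np.log10(rate) + bvalue * mmin
# The implied rate of M >= 4.5 events should scale as 10**(-b * (4.5 - mmin))
n_m45 = 10.0 ** (aval - bvalue * 4.5)
assert np.isclose(n_m45, rate * 10.0 ** (-bvalue * (4.5 - mmin)))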
try:
    os.remove(hdf_filename)
except OSError:
    pass

config = {"k": 3,
          "r_min": 1.0E-7,
          "bvalue": bvalue,
          "mmin": 4.0,
          "learning_start": learning_start,
          "learning_end": learning_end,
          "target_start": target_end,
          "target_end": target_end}
smoother = h_w.HelmstetterEtAl2007(
    grid_lims, config, source.catalogue, storage_file=hdf_filename)
smoother._get_catalogue_completeness_weights(completeness_table_a)
smoother.build_distance_arrays()
smoother.build_catalogue_2_grid_array()
# Exhaustive smoothing: optional grid search over k and r_min
exhaustive = False
if exhaustive:
    params, poiss_llh = smoother.exhaustive_smoothing(
        np.arange(3, 6, 1), np.arange(1.0e-6, 1.0e-5, 2.0e-6))
    print(params, poiss_llh)
    smoother.config["k"] = params[0]
    smoother.config["r_min"] = params[1]
# d_i = smoother.optimise_bandwidths()
# poiss_llh, kagan_i0, kagan_i1 = smoother.run_smoothing(config["r_min"], d_i)
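# The block above expects bvalue, learning_start, learning_end, target_end and
# hdf_filename to come from an enclosing driver loop that is not shown here.
# A minimal sketch of such a loop is given below; the b-values and periods are
# placeholders only.
for bvalue in [0.8, 1.0, 1.2]:
    for (learning_start, learning_end, target_end) in [(1965, 2009, 2010),
                                                       (1990, 2003, 2013)]:
        hdf_filename = "Aus1_tmp2%.3f_%i_%i.hdf5" % (
            bvalue, learning_start, learning_end)
        # ... run the smoothing block above for this parameter combination ...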
mmin = completeness_table_a[0][1]
print('mmin', mmin)
config = {"k": 3,
          "r_min": 1.0E-6,
          "bvalue": bvalue,
          "mmin": mmin,
          "learning_start": 1900,
          "learning_end": 2017,
          "target_start": 2018,
          "target_end": 2019}
# Using already optimised parameters
smoother = h_w.HelmstetterEtAl2007(
    grid_lims, config, catalogue_depth_clean,
    storage_file=("Aus1_tmp2%.3f.hdf5" % bvalue))
smoother._get_catalogue_completeness_weights(completeness_table_a)
smoother.build_distance_arrays()
smoother.build_catalogue_2_grid_array()
# Exhaustive smoothing: optional grid search over k and r_min
exhaustive = False
if exhaustive:
    params, poiss_llh = smoother.exhaustive_smoothing(
        np.arange(2, 10, 1), np.arange(1.0e-6, 1.0e-5, 2.0e-6))
    print(params, poiss_llh)
    smoother.config["k"] = params[0]
    smoother.config["r_min"] = params[1]
    # print('Exiting now, re-run using optimised parameters')
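# The smoothing runs above also assume an hmtk earthquake catalogue
# (source.catalogue / catalogue_depth_clean) is already loaded and cleaned.
# A minimal, illustrative loading step is sketched below; "AUS_catalogue.csv"
# is a placeholder filename and the parser import path may differ between
# hmtk versions.
from openquake.hmtk.parsers.catalogue import CsvCatalogueParser

parser = CsvCatalogueParser("AUS_catalogue.csv")
catalogue = parser.read_file()
catalogue.sort_catalogue_chronologically()
print('Catalogue contains %i events' % catalogue.get_number_events())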