def plot_sources(point_source, fault_source, llon=105, ulon=155,
                 llat=-45, ulat=-5,
                 filename='Point and fault sources.png'):
    """Plot point and simple-fault sources on a basemap and save it.

    :param point_source: iterable of point sources, each exposing
        ``.location.longitude`` / ``.location.latitude``
    :param fault_source: iterable of simple-fault sources, each exposing
        ``.fault_trace.points`` (a list of points with
        ``longitude``/``latitude`` attributes)
    :param llon: lower longitude of the map bounding box (default 105)
    :param ulon: upper longitude of the map bounding box (default 155)
    :param llat: lower latitude of the map bounding box (default -45)
    :param ulat: upper latitude of the map bounding box (default -5)
    :param filename: output file the rendered map is saved to
    """
    map_config = {'min_lon': np.floor(llon), 'max_lon': np.ceil(ulon),
                  'min_lat': np.floor(llat), 'max_lat': np.ceil(ulat),
                  'resolution': 'i'}
    basemap1 = HMTKBaseMap(map_config, 'Point and fault sources')

    # Point sources: one small red square per source location.
    for pt_source in point_source:
        x, y = basemap1.m(pt_source.location.longitude,
                          pt_source.location.latitude)
        basemap1.m.plot(x, y, 'rs', markersize=2.0)

    # Fault sources: draw each fault trace as a blue polyline.
    for simplefault in fault_source:
        trace_lons = np.array([pnt.longitude
                               for pnt in simplefault.fault_trace.points])
        trace_lats = np.array([pnt.latitude
                               for pnt in simplefault.fault_trace.points])
        x, y = basemap1.m(trace_lons, trace_lats)
        basemap1.m.plot(x, y, 'b', linewidth=1.3)

    basemap1.savemap(filename)
def decluster_catalogue(catalogue, config):
    """Decluster *catalogue* and purge the non-mainshock events.

    :param catalogue: hmtk catalogue instance (modified in place via
        ``select_catalogue_events``)
    :param config: dict with keys:
        ``'decluster_method'`` -- ``'afteran'`` or ``'gardner_knopoff'``
                                  (required)
        ``'plot'``             -- if truthy, map the identified clusters
        ``'figname'``          -- optional file name the cluster map is
                                  saved to
        ``'filename'``         -- optional CSV file name the purged
                                  catalogue is written to
        plus whatever options the chosen declustering algorithm needs
    :returns: the purged catalogue, or ``None`` when
        ``'decluster_method'`` is not recognized
    """
    # Step 1 - select the declustering tool from the configuration.
    if config['decluster_method'] == 'afteran':
        decluster_method = Afteran()
    elif config['decluster_method'] == 'gardner_knopoff':
        decluster_method = GardnerKnopoffType1()
    else:
        print("invalid decluster_method configuration: use [afteran|gardner_knopoff]")
        return None

    print('Running declustering ...')
    cluster_vector, flag_vector = decluster_method.decluster(catalogue, config)
    print('done!')
    print('%s clusters found' % np.max(cluster_vector))
    print('%s Non-poissionian events identified' % np.sum(flag_vector != 0))

    # .get(): 'plot' is optional -- absent must mean "no plot", not KeyError.
    if config.get('plot'):
        # Map configuration for the cluster plot (South America extent).
        map_dpi = 90
        map_title = 'Clusters'
        map_config = {'min_lon': -80.0, 'max_lon': -30.0,
                      'min_lat': -37.0, 'max_lat': 14.0,
                      'resolution': 'l'}

        basemap = HMTKBaseMap(map_config, map_title, dpi=map_dpi)

        # Show only the clustered events, colour-scaled by cluster id.
        idx = cluster_vector != 0
        x = catalogue.data['longitude'][idx]
        y = catalogue.data['latitude'][idx]
        c = cluster_vector[idx]

        basemap.add_colour_scaled_points(x, y, c,
                                         overlay=True,
                                         shape='s',
                                         alpha=0.5,
                                         size=100,
                                         linewidth=0.5,
                                         facecolor='none',
                                         cmap=plt.cm.get_cmap('Paired'))
        plt.show()

        if config.get('figname'):
            basemap.savemap(config['figname'])

    print('Original catalogue had %s events' % catalogue.get_number_events())
    # Keep only the events flagged 0 (mainshocks).
    catalogue.select_catalogue_events(flag_vector == 0)
    print('Purged catalogue now contains %s events'
          % catalogue.get_number_events())

    # Optionally persist the purged catalogue as CSV.
    if config.get('filename'):
        writer = CsvCatalogueWriter(config['filename'])
        writer.write_file(catalogue)

    return catalogue
# add source model #basemap1.add_source_model(source_model, area_border, border_width, point_marker, point_size, overlay) basemap1.add_source_model(source_model, overlay=True) ### ### Catálogo ### x = catalogue.data['longitude'] y = catalogue.data['latitude'] z = catalogue.data['depth'] _idx = np.argsort(z) catalogue.select_catalogue_events(_idx) basemap1.add_catalogue(catalogue, alpha=0.1) #basemap1.add_colour_scaled_points(x, y, np.log(z+1), overlay=True) if savefig: basemap1.savemap("/Users/pirchiner/Desktop/teste.png") plt.show() #exit() # Limit the catalogue to the time period 1960 - 2012 #valid_time = np.logical_and(catalogue.data['year'] >= 1960, # catalogue.data['year'] <= 2014) #catalogue.select_catalogue_events(valid_time)