def test_analysis_Frankel_comparison(self):
    '''
    Tests the run_analysis function by comparing its results with
    those of Frankel's Fortran implementation, run under the same
    conditions
    '''
    self.grid_limits = [-128., -113.0, 0.2, 30., 43.0, 0.2,
                        0., 100., 100.]
    comp_table = np.array([[1933., 4.0],
                           [1900., 5.0],
                           [1850., 6.0],
                           [1850., 7.0]])
    config = {'Length_Limit': 3., 'BandWidth': 50., 'increment': 0.1}
    self.model = SmoothedSeismicity(self.grid_limits, bvalue=0.8)
    self.catalogue = Catalogue()
    frankel_catalogue = np.genfromtxt(
        os.path.join(BASE_PATH, FRANKEL_TEST_CATALOGUE))
    self.catalogue.data['magnitude'] = frankel_catalogue[:, 0]
    self.catalogue.data['longitude'] = frankel_catalogue[:, 1]
    self.catalogue.data['latitude'] = frankel_catalogue[:, 2]
    self.catalogue.data['depth'] = frankel_catalogue[:, 3]
    self.catalogue.data['year'] = frankel_catalogue[:, 4]
    self.catalogue.end_year = 2006
    # Reference rates from Frankel's implementation
    frankel_results = np.genfromtxt(
        os.path.join(BASE_PATH, FRANKEL_OUTPUT_FILE))
    # Run analysis
    output_data = self.model.run_analysis(
        self.catalogue,
        config,
        completeness_table=comp_table,
        smoothing_kernel=IsotropicGaussian())
    # The smoothed rates should conserve the total observed rate, and
    # the total should be within 1.0 of the expected value of 390
    self.assertTrue(
        fabs(np.sum(output_data[:, -1]) - np.sum(output_data[:, -2])) < 1.0)
    self.assertTrue(fabs(np.sum(output_data[:, -1]) - 390.) < 1.0)
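# --- Illustration (hypothetical, not part of the test suite) ---
# A minimal numpy sketch of the invariant asserted above: a smoothing
# kernel whose weights sum to one redistributes rates across cells
# without changing the total. All names and values here are
# illustrative assumptions.
import numpy as np

rates = np.array([0., 4., 10., 4., 0.])
weights = np.array([0.25, 0.5, 0.25])   # normalised 3-point kernel
smoothed = np.convolve(rates, weights, mode='same')
# With zero-padded edges no rate is lost, so the totals agree - the
# same property the assertions above check to within a tolerance of 1.0
assert abs(np.sum(smoothed) - np.sum(rates)) < 1.0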
def setUp(self):
    self.model = IsotropicGaussian()
    # Set up a simple grid of points at 0.5 degree spacing, all with
    # a fixed 10 km depth and an initial rate of zero
    [gx, gy] = np.meshgrid(np.arange(35.5, 40., 0.5),
                           np.arange(40.5, 45., 0.5))
    ngp = np.shape(gx)[0] * np.shape(gx)[1]
    gx = np.reshape(gx, [ngp, 1])
    gy = np.reshape(gy, [ngp, 1])
    depths = 10. * np.ones(ngp)
    self.data = np.column_stack(
        [gx, gy, depths, np.zeros(ngp, dtype=float)])
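# --- Usage sketch (hypothetical test, not part of the suite) ---
# Shows how the fixture above would be passed to the kernel: the
# smooth_data method takes the [lon, lat, depth, rate] array, a config
# dict and a 3D flag, and returns the smoothed rates together with the
# observed and smoothed totals (the same call made in run_analysis
# below). The config values are illustrative assumptions.
def test_smooth_data_usage_sketch(self):
    config = {'Length_Limit': 3., 'BandWidth': 50.}
    smoothed, sum_obs, sum_smooth = self.model.smooth_data(
        self.data, config, False)
    # With a normalised kernel the total rate should be preserved
    self.assertAlmostEqual(sum_obs, sum_smooth)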
comp_table = np.array([[1980, 3.],
                       [1975, 3.5],
                       [1975, 4.],
                       [1965, 4.5],
                       [1965, 5.],
                       [1860, 5.5],
                       [1860, 6.]])

# Configuration of the smoothing algorithm
config = {'Length_Limit': 3.,
          'BandWidth': 150.,
          'increment': True,
          'magnitude_increment': 0.5}

# Run the smoothing
output = model.run_analysis(
    catalogue,
    config,
    completeness_table=comp_table,
    smoothing_kernel=IsotropicGaussian())

x = output[:, 0]
y = output[:, 1]
r = output[:, 4]
# Convert to log10 rates, masking cells with zero rate
r = np.array([np.log10(val) if val > 0 else np.nan for val in r])
r[r < 0] = 0.

from map import rate_map
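# --- Plotting sketch (assumption: matplotlib available) ---
# If the rate_map helper imported above were unavailable, the log10
# rate grid computed above could be mapped with a plain matplotlib
# scatter plot; this is an illustrative alternative, not part of the
# original script.
import matplotlib.pyplot as plt

plt.scatter(x, y, c=r, s=10, marker='s')
plt.colorbar(label='log10 smoothed rate')
plt.xlabel('Longitude')
plt.ylabel('Latitude')
plt.show()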
def run_analysis(self, catalogue, config, completeness_table=None,
                 smoothing_kernel=None):
    '''
    Runs an analysis of smoothed seismicity in the manner originally
    implemented by Frankel (1995)

    :param catalogue:
        Instance of the hmtk.seismicity.catalogue.Catalogue class, with
        a catalogue.data dictionary containing the following -
        'year' - numpy.ndarray vector of years
        'longitude' - numpy.ndarray vector of longitudes
        'latitude' - numpy.ndarray vector of latitudes
        'depth' - numpy.ndarray vector of depths

    :param dict config:
        Configuration settings of the algorithm:
        * 'Length_Limit' - Maximum number of bandwidths for use in
          smoothing (float)
        * 'BandWidth' - Bandwidth (km) of the smoothing kernel (float)
        * 'increment' - Output incremental (True) or cumulative (False)
          a-value

    :param np.ndarray completeness_table:
        Completeness of the catalogue assuming evenly spaced magnitudes
        from the most recent bin to the oldest bin [year, magnitude]

    :param smoothing_kernel:
        Smoothing kernel as instance of
        :class:`hmtk.seismicity.smoothing.kernels.base.BaseSmoothingKernel`

    :returns:
        Full smoothed seismicity data as np.ndarray, of the form
        [Longitude, Latitude, Depth, Observed, Smoothed]
    '''
    self.catalogue = catalogue
    if smoothing_kernel:
        self.kernel = smoothing_kernel
    else:
        self.kernel = IsotropicGaussian()

    # If the grid limits are passed as a list they define the grid
    # directly; if passed as a spacing (float) the extent is taken
    # from the catalogue
    if isinstance(self.grid_limits, list):
        self.grid_limits = Grid.make_from_list(self.grid_limits)
        assert self.grid_limits['xmax'] >= self.grid_limits['xmin']
        assert self.grid_limits['xspc'] > 0.0
        assert self.grid_limits['ymax'] >= self.grid_limits['ymin']
        assert self.grid_limits['yspc'] > 0.0
    elif isinstance(self.grid_limits, float):
        self.grid_limits = Grid.make_from_catalogue(
            self.catalogue, self.grid_limits,
            config['Length_Limit'] * config['BandWidth'])

    completeness_table, mag_inc = utils.get_even_magnitude_completeness(
        completeness_table, self.catalogue)
    end_year = self.catalogue.end_year

    # Get Weichert factor
    t_f, _ = utils.get_weichert_factor(self.beta,
                                       completeness_table[:, 1],
                                       completeness_table[:, 0],
                                       end_year)
    # Get the grid
    self.create_3D_grid(self.catalogue, completeness_table, t_f, mag_inc)
    if config['increment']:
        # Get Hermann adjustment factors
        fval, fival = utils.hermann_adjustment_factors(
            self.bval, completeness_table[0, 1], config['increment'])
        self.data[:, -1] = fval * fival * self.data[:, -1]

    # Apply smoothing
    smoothed_data, sum_data, sum_smooth = self.kernel.smooth_data(
        self.data, config, self.use_3d)
    print('Smoothing Total Rate Comparison - '
          'Observed: %.6e, Smoothed: %.6e' % (sum_data, sum_smooth))
    self.data = np.column_stack([self.data, smoothed_data])
    return self.data
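# --- Usage sketch ---
# A minimal end-to-end example of run_analysis, mirroring the unit test
# above. It assumes `catalogue` is an already-populated hmtk Catalogue
# instance; the grid limits, completeness table and config values are
# illustrative only.
grid_limits = [-128., -113., 0.2, 30., 43., 0.2, 0., 100., 100.]
model = SmoothedSeismicity(grid_limits, bvalue=0.8)
comp_table = np.array([[1933., 4.0],
                       [1900., 5.0],
                       [1850., 6.0]])
config = {'Length_Limit': 3., 'BandWidth': 50., 'increment': 0.1}
output = model.run_analysis(catalogue,
                            config,
                            completeness_table=comp_table,
                            smoothing_kernel=IsotropicGaussian())
# output columns: [Longitude, Latitude, Depth, Observed, Smoothed]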