    def test_weichert_factor(self):
        '''
        Tests the Weichert adjustment factor to compensate for time-varying
        completeness
        '''
        # Test 1: Comparison against the USGS Implementation
        beta = 0.8 * np.log(10.)
        end_year = 2006.
        comp_table = np.array([[1933., 4.0],
                               [1900., 5.0],
                               [1850., 6.0],
                               [1850., 7.0]])
        self.assertAlmostEqual(0.0124319686,
            utils.get_weichert_factor(beta, comp_table[:, 1], comp_table[:, 0],
                                      end_year)[0])

        # Test 2: Single value of completeness
        comp_table = np.array([[1960., 4.0]])
        self.assertAlmostEqual(1.0,
            utils.get_weichert_factor(beta, comp_table[:, 1], comp_table[:, 0],
                                      end_year)[0])
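For reference, the expected value in Test 1 can be reproduced by weighting each completeness window by its duration and its Gutenberg-Richter weight exp(-beta * m). A minimal sketch, assuming mid-point magnitudes for the completeness bins with the last bin extended by half a bin width; the helper below is a hypothetical re-implementation, not the hmtk source:

import numpy as np

def weichert_factor_sketch(beta, cmag, cyear, end_year):
    # Hypothetical illustration (assumed, not the hmtk source).
    # Mid-points of the completeness magnitude bins; the last bin is
    # extended by half a bin width (assumption)
    mids = np.hstack([(cmag[1:] + cmag[:-1]) / 2.0,
                      cmag[-1] + (cmag[-1] - cmag[-2]) / 2.0])
    weight = np.exp(-beta * mids)        # Gutenberg-Richter weight
    duration = end_year - cyear + 1.0    # years each bin is complete
    return np.sum(weight) / np.sum(duration * weight)

beta = 0.8 * np.log(10.0)
comp = np.array([[1933., 4.0], [1900., 5.0], [1850., 6.0], [1850., 7.0]])
print(weichert_factor_sketch(beta, comp[:, 1], comp[:, 0], 2006.))
# ~0.0124319686, matching Test 1 above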
Example #2
    def test_weichert_factor(self):
        '''
        Tests the Weichert adjustment factor to compensate for time-varying
        completeness
        '''
        # Test 1: Comparison against the USGS Implementation
        beta = 0.8 * np.log(10.)
        end_year = 2006.
        comp_table = np.array([[1933., 4.0], [1900., 5.0], [1850., 6.0],
                               [1850., 7.0]])
        self.assertAlmostEqual(
            0.0124319686,
            utils.get_weichert_factor(beta, comp_table[:, 1], comp_table[:, 0],
                                      end_year)[0])

        # Test 2: Single value of completeness
        comp_table = np.array([[1960., 4.0]])
        self.assertAlmostEqual(
            1. / (2006. - 1960. + 1.),
            utils.get_weichert_factor(beta, comp_table[:, 1], comp_table[:, 0],
                                      end_year)[0])
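In this variant the single-window case is pinned to the reciprocal of the observation period in years: 1/(2006 - 1960 + 1) = 1/47 ≈ 0.0212766. With only one completeness interval there is nothing to weight across bins, so the factor is simply 1/duration.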
Example #3
    def run_analysis(self,
                     catalogue,
                     config,
                     completeness_table=None,
                     smoothing_kernel=None):
        '''
        Runs an analysis of smoothed seismicity in the manner
        originally implemented by Frankel (1995)

        :param catalogue:
            Instance of the hmtk.seismicity.catalogue.Catalogue class
            catalogue.data dictionary containing the following -
            'year' - numpy.ndarray vector of years
            'longitude' - numpy.ndarray vector of longitudes
            'latitude' - numpy.ndarray vector of latitudes
            'depth' - numpy.ndarray vector of depths

        :param dict config:
            Configuration settings of the algorithm:
            * 'Length_Limit' - Maximum number of bandwidths for use in
                               smoothing (Float)
            * 'BandWidth' - Bandwidth (km) of the Smoothing Kernel (Float)
            * 'increment' - Output incremental (True) or cumulative a-value
                            (False)

        :param np.ndarray completeness_table:
            Completeness of the catalogue assuming evenly spaced magnitudes
            from most recent bin to oldest bin [year, magnitude]

        :param smoothing_kernel:
            Smoothing kernel as instance of
            :class:`hmtk.seismicity.smoothing.kernels.base.BaseSmoothingKernel`

        :returns:
            Full smoothed seismicity data as np.ndarray, of the form
            [Longitude, Latitude, Depth, Observed, Smoothed]
        '''

        self.catalogue = catalogue
        if smoothing_kernel:
            self.kernel = smoothing_kernel
        else:
            self.kernel = IsotropicGaussian()

        # If no grid limits are specified then take from catalogue
        if isinstance(self.grid_limits, list):
            self.grid_limits = Grid.make_from_list(self.grid_limits)
            assert self.grid_limits['xmax'] >= self.grid_limits['xmin']
            assert self.grid_limits['xspc'] > 0.0
            assert self.grid_limits['ymax'] >= self.grid_limits['ymin']
            assert self.grid_limits['yspc'] > 0.0
        elif isinstance(self.grid_limits, float):
            self.grid_limits = Grid.make_from_catalogue(
                self.catalogue, self.grid_limits,
                config['Length_Limit'] * config['BandWidth'])

        completeness_table, mag_inc = utils.get_even_magnitude_completeness(
            completeness_table, self.catalogue)

        end_year = self.catalogue.end_year

        # Get Weichert factor
        t_f, _ = utils.get_weichert_factor(self.beta, completeness_table[:, 1],
                                           completeness_table[:, 0], end_year)
        # Get the grid
        self.create_3D_grid(self.catalogue, completeness_table, t_f, mag_inc)
        if config['increment']:
            # Get Hermann adjustment factors
            fval, fival = utils.hermann_adjustment_factors(
                self.bval, completeness_table[0, 1], config['increment'])
            self.data[:, -1] = fval * fival * self.data[:, -1]

        # Apply smoothing
        smoothed_data, sum_data, sum_smooth = self.kernel.smooth_data(
            self.data, config, self.use_3d)
        print('Smoothing Total Rate Comparison - '
              'Observed: %.6e, Smoothed: %.6e' % (sum_data, sum_smooth))
        self.data = np.column_stack([self.data, smoothed_data])
        return self.data
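A minimal usage sketch for the method above. The import path, the SmoothedSeismicity constructor signature, and the nine-element grid-limits list are assumptions based on the hmtk layout this snippet implies, and catalogue stands for a previously loaded hmtk Catalogue instance:

import numpy as np
# Import path and constructor signature are assumptions (see above)
from hmtk.seismicity.smoothing.smoothed_seismicity import SmoothedSeismicity

# Assumed grid format: [xmin, xmax, xspc, ymin, ymax, yspc, zmin, zmax, zspc]
grid_limits = [105.0, 110.0, 0.1, -45.0, -40.0, 0.1, 0.0, 50.0, 50.0]
config = {'Length_Limit': 3.0,   # max. number of bandwidths for smoothing
          'BandWidth': 50.0,     # kernel bandwidth in km
          'increment': False}    # cumulative a-value output
completeness = np.array([[1990., 4.0],
                         [1960., 5.0],
                         [1900., 6.0]])

smoother = SmoothedSeismicity(grid_limits, use_3d=False, bvalue=1.0)
# 'catalogue' must be a previously loaded hmtk Catalogue instance
output = smoother.run_analysis(catalogue, config,
                               completeness_table=completeness)
# Columns: [Longitude, Latitude, Depth, Observed, Smoothed]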
Example #4
    def run_analysis(self, catalogue, config, completeness_table=None,
                     smoothing_kernel=None, end_year=None):
        '''
        Runs an analysis of smoothed seismicity in the manner
        originally implemented by Frankel (1995)

        :param catalogue: 
            Instance of the hmtk.seismicity.catalogue.Catalogue class
            catalogue.data dictionary containing the following - 
            'year' - numpy.ndarray vector of years
            'longitude' - numpy.ndarray vector of longitudes
            'latitude' - numpy.ndarray vector of latitudes
            'depth' - numpy.ndarray vector of depths
        
        :param dict config:
            Configuration settings of the algorithm:
            * 'Length_Limit' - Maximum number of bandwidths for use in 
                               smoothing (Float)
            * 'BandWidth' - Bandwidth (km) of the Smoothing Kernel (Float)
            * 'increment' - Output incremental (True) or cumulative a-value
                            (False)

        :param np.ndarray completeness_table: 
            Completeness of the catalogue assuming evenly spaced magnitudes 
            from most recent bin to oldest bin [year, magnitude]

        :param smoothing_kernel:
            Smoothing kernel as instance of
            :class:`hmtk.seismicity.smoothing.kernels.base.BaseSmoothingKernel`

        :param float end_year:
            Year considered as the final year for the analysis. If not given
            the program will automatically take the last year in the catalogue.

        :returns:
            Full smoothed seismicity data as np.ndarray, of the form 
            [Longitude, Latitude, Depth, Observed, Smoothed]
        '''
        
        self.catalogue = catalogue
        if smoothing_kernel:
            self.kernel = smoothing_kernel
        else:
            self.kernel = IsotropicGaussian()
        

        # If no grid limits are specified then take from catalogue
        if not isinstance(self.grid_limits, dict):
            self.get_grid_from_catalogue(config)

        completeness_table, mag_inc = utils.get_even_magnitude_completeness(
            completeness_table,
            self.catalogue)
        
        if not end_year:
            end_year = np.max(self.catalogue.data['year'])

        # Get Weichert factor
        t_f, _ = utils.get_weichert_factor(self.beta,
                                           completeness_table[:, 1],
                                           completeness_table[:, 0],
                                           end_year)
        # Get the grid
        self.create_3D_grid(self.catalogue, completeness_table, t_f, mag_inc)           
        if config['increment']:
            # Get Hermann adjustment factors
            fval, fival = utils.hermann_adjustment_factors(self.bval,
                completeness_table[0, 1], config['increment'])
            self.data[:, -1] = fval * fival * self.data[:, -1]

        # Apply smoothing
        smoothed_data, sum_data, sum_smooth = self.kernel.smooth_data(
            self.data, config, self.use_3d)
        print('Smoothing Total Rate Comparison - '
              'Observed: %.6e, Smoothed: %.6e' % (sum_data, sum_smooth))
        self.data = np.column_stack([self.data, smoothed_data])
        return self.data
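Either variant accepts any kernel implementing the smooth_data contract used above, returning the smoothed rates along with the observed and smoothed totals. A hedged sketch of a custom kernel; the base-class import path comes from the docstring, while the class itself is a hypothetical illustration:

import numpy as np
# Base-class path taken from the docstring above; the kernel itself is
# a hypothetical no-op illustrating the smooth_data contract
from hmtk.seismicity.smoothing.kernels.base import BaseSmoothingKernel

class IdentityKernel(BaseSmoothingKernel):
    '''Illustrative kernel that returns the rates unsmoothed.'''

    def smooth_data(self, data, config, is_3d=False):
        # The last column of each data row holds the observed rate
        rates = data[:, -1]
        # A real kernel (cf. IsotropicGaussian) would redistribute each
        # cell's rate over its neighbours; here smoothed == observed
        smoothed = rates.copy()
        return smoothed, np.sum(rates), np.sum(smoothed)

# e.g. smoother.run_analysis(catalogue, config,
#                            smoothing_kernel=IdentityKernel())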