Example #1
    def setUp(self):
        self.model = IsotropicGaussian()
        # Set up a simple grid
        [gx, gy] = np.meshgrid(np.arange(35.5, 40., 0.5),
                               np.arange(40.5, 45., 0.5))
        ngp = np.shape(gx)[0] * np.shape(gx)[1]
        gx = np.reshape(gx, [ngp, 1])
        gy = np.reshape(gy, [ngp, 1])
        depths = 10. * np.ones(ngp)
        self.data = np.column_stack(
            [gx, gy, depths, np.zeros(ngp, dtype=float)])
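
A minimal sketch of how such a grid is then smoothed (the smooth_data signature follows the test examples further down; the event position and config values here are illustrative):

import numpy as np

model = IsotropicGaussian()
gx, gy = np.meshgrid(np.arange(35.5, 40., 0.5), np.arange(40.5, 45., 0.5))
ngp = gx.size
data = np.column_stack([gx.reshape(ngp, 1), gy.reshape(ngp, 1),
                        10. * np.ones(ngp), np.zeros(ngp)])
data[50, 3] = 1.                                   # a single observed event
config = {'Length_Limit': 3.0, 'BandWidth': 30.0}  # bandwidth in km
smoothed, sum_data, sum_smooth = model.smooth_data(data, config)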
Example #2
    def test_analysis_Frankel_comparison(self):
        '''
        To test the run_analysis function, we compare its results with
        those from Frankel's Fortran implementation under the same
        conditions
        '''
        self.grid_limits = [-128., -113.0, 0.2, 30., 43.0, 0.2, 0., 100., 100.]
        comp_table = np.array([[1933., 4.0], [1900., 5.0], [1850., 6.0],
                               [1850., 7.0]])
        config = {'Length_Limit': 3., 'BandWidth': 50., 'increment': 0.1}
        self.model = SmoothedSeismicity(self.grid_limits, bvalue=0.8)
        self.catalogue = Catalogue()
        frankel_catalogue = np.genfromtxt(
            os.path.join(BASE_PATH, FRANKEL_TEST_CATALOGUE))
        self.catalogue.data['magnitude'] = frankel_catalogue[:, 0]
        self.catalogue.data['longitude'] = frankel_catalogue[:, 1]
        self.catalogue.data['latitude'] = frankel_catalogue[:, 2]
        self.catalogue.data['depth'] = frankel_catalogue[:, 3]
        self.catalogue.data['year'] = frankel_catalogue[:, 4]
        self.catalogue.end_year = 2006
        frankel_results = np.genfromtxt(
            os.path.join(BASE_PATH, FRANKEL_OUTPUT_FILE))
        # Run analysis
        output_data = self.model.run_analysis(
            self.catalogue,
            config,
            completeness_table=comp_table,
            smoothing_kernel=IsotropicGaussian())

        self.assertTrue(
            fabs(np.sum(output_data[:, -1]) -
                 np.sum(output_data[:, -2])) < 1.0)
        self.assertTrue(fabs(np.sum(output_data[:, -1]) - 390.) < 1.0)
Example #3
    def setUp(self):
        self.model = IsotropicGaussian()
        # Set up a simple grid
        [gx, gy] = np.meshgrid(np.arange(35.5, 40., 0.5),
                               np.arange(40.5, 45., 0.5))
        ngp = np.shape(gx)[0] * np.shape(gx)[1]
        gx = np.reshape(gx, [ngp, 1])
        gy = np.reshape(gy, [ngp, 1])
        depths = 10. * np.ones(ngp)
        self.data = np.column_stack([gx, gy, depths,
                                     np.zeros(ngp, dtype=float)])
Example #4
comp_table = np.array([[1980, 3.], [1975, 3.5], [1975, 4.], [1965, 4.5],
                       [1965, 5.], [1860, 5.5], [1860, 6.]])

# Configuration
config = {
    'Length_Limit': 3.,
    'BandWidth': 150.,
    'increment': True,
    'magnitude_increment': 0.5
}

# Run the smoothing
o = model.run_analysis(
    catalogue,
    config,
    completeness_table=comp_table,
    smoothing_kernel=IsotropicGaussian(),
    # increment=False,
)

x = o[:, 0]
y = o[:, 1]
r = o[:, 4]  # / (res**2)

# Log-scale the rates; cells with zero rate become NaN and negative
# log-rates are floored at zero for plotting
valid = r > 0.
r = np.where(valid, r, np.nan)
r[valid] = np.log10(r[valid])
r[r < 0.] = 0.

from map import rate_map
Example #5
class TestIsotropicGaussian(unittest.TestCase):
    '''
    Simple tests of the Isotropic Gaussian Kernel
    (as implemented by Frankel (1995))
    '''
    def setUp(self):
        self.model = IsotropicGaussian()
        # Set up a simple grid
        [gx, gy] = np.meshgrid(np.arange(35.5, 40., 0.5),
                               np.arange(40.5, 45., 0.5))
        ngp = np.shape(gx)[0] * np.shape(gx)[1]
        gx = np.reshape(gx, [ngp, 1])
        gy = np.reshape(gy, [ngp, 1])
        depths = 10. * np.ones(ngp)
        self.data = np.column_stack([gx, gy, depths,
                                     np.zeros(ngp, dtype=float)])

    @unittest.skip('Have Graeme fix this')
    def test_kernel_single_event(self):
        # ensure kernel is smoothing values correctly for a single event
        self.data[50, 3] = 1.
        config = {'Length_Limit': 3.0, 'BandWidth': 30.0}
        expected_array = np.genfromtxt(os.path.join(BASE_PATH,
                                                    TEST_1_VALUE_FILE))
        (smoothed_array, sum_data, sum_smooth) = \
            self.model.smooth_data(self.data, config)
        np.testing.assert_array_almost_equal(expected_array, smoothed_array)
        self.assertAlmostEqual(sum_data, 1.)
        # Assert that sum of the smoothing is equal to the sum of the
        # data values to 3 dp
        self.assertAlmostEqual(sum_data, sum_smooth, 3)

    @unittest.skip('Have Graeme fix this')
    def test_kernel_multiple_event(self):
        # ensure kernel is smoothing values correctly for multiple events
        self.data[[5, 30, 65], 3] = 1.
        config = {'Length_Limit': 3.0, 'BandWidth': 30.0}
        expected_array = np.genfromtxt(os.path.join(BASE_PATH,
                                                    TEST_3_VALUE_FILE))
        (smoothed_array, sum_data, sum_smooth) = \
            self.model.smooth_data(self.data, config)
        np.testing.assert_array_almost_equal(expected_array, smoothed_array)
        self.assertAlmostEqual(sum_data, 3.)
        # Assert that sum of the smoothing is equal to the sum of the
        # data values to 2 dp
        self.assertAlmostEqual(sum_data, sum_smooth, 2)

    @unittest.skip('Have Graeme fix this')
    def test_kernel_single_event_3d(self):
        # ensure kernel is smoothing values correctly for a single event
        self.data[50, 3] = 1.
        self.data[50, 2] = 20.
        config = {'Length_Limit': 3.0, 'BandWidth': 30.0}
        expected_array = np.genfromtxt(os.path.join(BASE_PATH,
                                                    TEST_1_VALUE_3D_FILE))
        (smoothed_array, sum_data, sum_smooth) = \
            self.model.smooth_data(self.data, config, is_3d=True)
        np.testing.assert_array_almost_equal(expected_array, smoothed_array)
        self.assertAlmostEqual(sum_data, 1.)
        # Assert that sum of the smoothing is equal to the sum of the
        # data values to 2 dp
        self.assertAlmostEqual(sum_data, sum_smooth, 2)
Example #6
    def run_analysis(self,
                     catalogue,
                     config,
                     completeness_table=None,
                     smoothing_kernel=None):
        '''
        Runs an analysis of smoothed seismicity in the manner
        originally implemented by Frankel (1995)

        :param catalogue:
            Instance of the hmtk.seismicity.catalogue.Catalogue class
            catalogue.data dictionary containing the following -
            'year' - numpy.ndarray vector of years
            'longitude' - numpy.ndarray vector of longitudes
            'latitude' - numpy.ndarray vector of latitudes
            'depth' - numpy.ndarray vector of depths

        :param dict config:
            Configuration settings of the algorithm:
            * 'Length_Limit' - Maximum number of bandwidths for use in
                               smoothing (Float)
            * 'BandWidth' - Bandwidth (km) of the Smoothing Kernel (Float)
            * 'increment' - Output incremental (True) or cumulative a-value
                            (False)

        :param np.ndarray completeness_table:
            Completeness of the catalogue assuming evenly spaced magnitudes
            from most recent bin to oldest bin [year, magnitude]

        :param smoothing_kernel:
            Smoothing kernel as instance of
            :class:`hmtk.seismicity.smoothing.kernels.base.BaseSmoothingKernel`

        :returns:
            Full smoothed seismicity data as np.ndarray, of the form
            [Longitude, Latitude, Depth, Observed, Smoothed]
        '''

        self.catalogue = catalogue
        if smoothing_kernel:
            self.kernel = smoothing_kernel
        else:
            self.kernel = IsotropicGaussian()

        # If no grid limits are specified then take from catalogue
        if isinstance(self.grid_limits, list):
            self.grid_limits = Grid.make_from_list(self.grid_limits)
            assert self.grid_limits['xmax'] >= self.grid_limits['xmin']
            assert self.grid_limits['xspc'] > 0.0
            assert self.grid_limits['ymax'] >= self.grid_limits['ymin']
            assert self.grid_limits['yspc'] > 0.0
        elif isinstance(self.grid_limits, float):
            self.grid_limits = Grid.make_from_catalogue(
                self.catalogue, self.grid_limits,
                config['Length_Limit'] * config['BandWidth'])

        completeness_table, mag_inc = utils.get_even_magnitude_completeness(
            completeness_table, self.catalogue)

        end_year = self.catalogue.end_year

        # Get Weichert factor
        t_f, _ = utils.get_weichert_factor(self.beta, completeness_table[:, 1],
                                           completeness_table[:, 0], end_year)
        # Get the grid
        self.create_3D_grid(self.catalogue, completeness_table, t_f, mag_inc)
        if config['increment']:
            # Get Hermann adjustment factors
            fval, fival = utils.hermann_adjustment_factors(
                self.bval, completeness_table[0, 1], config['increment'])
            self.data[:, -1] = fval * fival * self.data[:, -1]

        # Apply smoothing
        smoothed_data, sum_data, sum_smooth = self.kernel.smooth_data(
            self.data, config, self.use_3d)
        print('Smoothing Total Rate Comparison - '
              'Observed: %.6e, Smoothed: %.6e' % (sum_data, sum_smooth))
        self.data = np.column_stack([self.data, smoothed_data])
        return self.data
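
The returned array appends the smoothed rate as the final column. A short sketch of unpacking it (model, catalogue, config and comp_table are assumed inputs, as in Example #2):

output = model.run_analysis(catalogue, config,
                            completeness_table=comp_table,
                            smoothing_kernel=IsotropicGaussian())
lons, lats, depths = output[:, 0], output[:, 1], output[:, 2]
observed, smoothed = output[:, 3], output[:, 4]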
Example #7
class SmoothedSeismicity(object):
    '''
    Class to implement an analysis of Smoothed Seismicity, including the
    grid counting of data and the smoothing.

    :param np.ndarray grid:
        Observed count in each cell [Long., Lat., Depth., Count]

    :param catalogue:
        Valid instance of the :class:`hmtk.seismicity.catalogue.Catalogue`

    :param bool use_3d:
        Decide if analysis is 2-D (False) or 3-D (True). If 3-D then distances
        will use hypocentral distance, otherwise epicentral distance

    :param float bval:
        b-value

    :param float beta:
        Beta value for exponential form (beta = bval * log(10.))

    :param np.ndarray data:
        Smoothed seismicity output

    :param dict grid_limits:
        Limits of the grid used for defining the cells
    '''
    def __init__(self, grid_limits, use_3d=False, bvalue=None):
        '''
        Instantiate the class with a set of grid limits

        :param grid_limits:
            Either a float (in which case the grid is computed from the
            catalogue with the given spacing), or an array of the form:
            [xmin, xmax, spcx, ymin, ymax, spcy, zmin, zmax, spcz]

        :param bool use_3d:
            Choose whether to use hypocentral distances for smoothing or
            only epicentral

        :param float bvalue:
            b-value for analysis
        '''
        self.grid = None
        self.catalogue = None
        self.use_3d = use_3d
        self.bval = bvalue
        if self.bval:
            self.beta = self.bval * log(10.)
        else:
            self.beta = None
        self.data = None

        self.grid_limits = grid_limits
        self.kernel = None
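
    # Illustrative note: the two accepted grid_limits forms, with values
    # borrowed from Example #2, are
    #   SmoothedSeismicity([-128., -113., 0.2, 30., 43., 0.2,
    #                       0., 100., 100.], bvalue=0.8)  # explicit limits
    #   SmoothedSeismicity(0.5, bvalue=0.8)  # spacing only; limits derived
    #                                        # from the catalogue at run time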

    def run_analysis(self,
                     catalogue,
                     config,
                     completeness_table=None,
                     smoothing_kernel=None):
        '''
        Runs an analysis of smoothed seismicity in the manner
        originally implemented by Frankel (1995)

        :param catalogue:
            Instance of the hmtk.seismicity.catalogue.Catalogue class
            catalogue.data dictionary containing the following -
            'year' - numpy.ndarray vector of years
            'longitude' - numpy.ndarray vector of longitudes
            'latitude' - numpy.ndarray vector of latitudes
            'depth' - numpy.ndarray vector of depths

        :param dict config:
            Configuration settings of the algorithm:
            * 'Length_Limit' - Maximum number of bandwidths for use in
                               smoothing (Float)
            * 'BandWidth' - Bandwidth (km) of the Smoothing Kernel (Float)
            * 'increment' - Output incremental (True) or cumulative a-value
                            (False)

        :param np.ndarray completeness_table:
            Completeness of the catalogue assuming evenly spaced magnitudes
            from most recent bin to oldest bin [year, magnitude]

        :param smoothing_kernel:
            Smoothing kernel as instance of
            :class:`hmtk.seismicity.smoothing.kernels.base.BaseSmoothingKernel`

        :returns:
            Full smoothed seismicity data as np.ndarray, of the form
            [Longitude, Latitude, Depth, Observed, Smoothed]
        '''

        self.catalogue = catalogue
        if smoothing_kernel:
            self.kernel = smoothing_kernel
        else:
            self.kernel = IsotropicGaussian()

        # If no grid limits are specified then take from catalogue
        if isinstance(self.grid_limits, list):
            self.grid_limits = Grid.make_from_list(self.grid_limits)
            assert self.grid_limits['xmax'] >= self.grid_limits['xmin']
            assert self.grid_limits['xspc'] > 0.0
            assert self.grid_limits['ymax'] >= self.grid_limits['ymin']
            assert self.grid_limits['yspc'] > 0.0
        elif isinstance(self.grid_limits, float):
            self.grid_limits = Grid.make_from_catalogue(
                self.catalogue, self.grid_limits,
                config['Length_Limit'] * config['BandWidth'])

        completeness_table, mag_inc = utils.get_even_magnitude_completeness(
            completeness_table, self.catalogue)

        end_year = self.catalogue.end_year

        # Get Weichert factor
        t_f, _ = utils.get_weichert_factor(self.beta, completeness_table[:, 1],
                                           completeness_table[:, 0], end_year)
        # Get the grid
        self.create_3D_grid(self.catalogue, completeness_table, t_f, mag_inc)
        if config['increment']:
            # Get Hermann adjustment factors
            fval, fival = utils.hermann_adjustment_factors(
                self.bval, completeness_table[0, 1], config['increment'])
            self.data[:, -1] = fval * fival * self.data[:, -1]

        # Apply smoothing
        smoothed_data, sum_data, sum_smooth = self.kernel.smooth_data(
            self.data, config, self.use_3d)
        print('Smoothing Total Rate Comparison - '
              'Observed: %.6e, Smoothed: %.6e' % (sum_data, sum_smooth))
        self.data = np.column_stack([self.data, smoothed_data])
        return self.data

    def create_2D_grid_simple(self,
                              longitude,
                              latitude,
                              year,
                              magnitude,
                              completeness_table,
                              t_f=1.,
                              mag_inc=0.1):
        '''
        Generates the grid from the limits using an approach closer to that of
        Frankel (1995)
        :param numpy.ndarray longitude:
            Vector of earthquake longitudes

        :param numpy.ndarray latitude:
            Vector of earthquake latitudes

        :param numpy.ndarray year:
            Vector of earthquake years

        :param numpy.ndarray magnitude:
            Vector of earthquake magnitudes

        :param numpy.ndarray completeness_table:
            Completeness table

        :param float t_f:
            Weichert adjustment factor

        :param float mag_inc:
            Increment of the completeness magnitude

        :returns:
           Two-dimensional spatial grid of observed rates
        '''
        assert mag_inc > 0.

        xlim = np.ceil((self.grid_limits['xmax'] - self.grid_limits['xmin']) /
                       self.grid_limits['xspc'])
        ylim = np.ceil((self.grid_limits['ymax'] - self.grid_limits['ymin']) /
                       self.grid_limits['yspc'])
        ncolx = int(xlim)
        ncoly = int(ylim)
        grid_count = np.zeros(ncolx * ncoly, dtype=float)
        for iloc in range(0, len(longitude)):
            dlon = (longitude[iloc] - self.grid_limits['xmin']) /\
                self.grid_limits['xspc']
            if (dlon < 0.) or (dlon > xlim):
                # Earthquake outside longitude limits
                continue
            xcol = int(dlon)
            if xcol == ncolx:
                # If longitude is directly on upper grid line then retain
                xcol = ncolx - 1
            dlat = fabs(self.grid_limits['ymax'] - latitude[iloc]) /\
                self.grid_limits['yspc']
            if (dlat < 0.) or (dlat > ylim):
                # Earthquake outside latitude limits
                continue
            ycol = int(dlat)  # Correct for floating precision
            if ycol == ncoly:
                # If latitude is directly on upper grid line then retain
                ycol = ncoly - 1
            kmarker = (ycol * int(xlim)) + xcol
            adjust = _get_adjustment(magnitude[iloc], year[iloc],
                                     completeness_table[0, 1],
                                     completeness_table[:, 0], t_f, mag_inc)
            if adjust:
                grid_count[kmarker] = grid_count[kmarker] + adjust
        return grid_count
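
    # Illustrative note (hypothetical values): cells are indexed row-major,
    # so with xmin = 30. and xspc = 0.5 an epicentre at longitude 31.2
    # falls in column xcol = int((31.2 - 30.) / 0.5) = 2, and its cell
    # index is kmarker = ycol * ncolx + xcol.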

    def create_3D_grid(self,
                       catalogue,
                       completeness_table,
                       t_f=1.0,
                       mag_inc=0.1):
        '''
        Counts the earthquakes observed in a three dimensional grid


        :param catalogue:
            Instance of the hmtk.seismicity.catalogue.Catalogue class
            catalogue.data dictionary containing the following -
            'year' - numpy.ndarray vector of years
            'longitude' - numpy.ndarray vector of longitudes
            'latitude' - numpy.ndarray vector of latitudes
            'depth' - numpy.ndarray vector of depths

        :param np.ndarray completeness_table:
            Completeness of the catalogue assuming evenly spaced magnitudes
            from most recent bin to oldest bin [year, magnitude]

        :param float t_f:
            Weichert adjustment factor

        :param float mag_inc:
            Increment of the completeness magnitude (default 0.1)

        :returns:
           Three-dimensional spatial grid of observed rates (or two dimensional
           if only one depth layer is considered)

        '''
        x_bins = np.arange(self.grid_limits['xmin'], self.grid_limits['xmax'],
                           self.grid_limits['xspc'])
        if x_bins[-1] < self.grid_limits['xmax']:
            x_bins = np.hstack([x_bins, x_bins[-1] + self.grid_limits['xspc']])

        y_bins = np.arange(self.grid_limits['ymin'], self.grid_limits['ymax'],
                           self.grid_limits['yspc'])
        if y_bins[-1] < self.grid_limits['ymax']:
            y_bins = np.hstack([y_bins, y_bins[-1] + self.grid_limits['yspc']])

        z_bins = np.arange(self.grid_limits['zmin'],
                           self.grid_limits['zmax'] + self.grid_limits['zspc'],
                           self.grid_limits['zspc'])

        if z_bins[-1] < self.grid_limits['zmax']:
            z_bins = np.hstack([z_bins, z_bins[-1] + self.grid_limits['zspc']])

        # Define centre points of grid cells
        gridx, gridy = np.meshgrid((x_bins[1:] + x_bins[:-1]) / 2.,
                                   (y_bins[1:] + y_bins[:-1]) / 2.)

        n_x, n_y = np.shape(gridx)
        gridx = np.reshape(gridx, [n_x * n_y, 1])
        gridy = np.reshape(np.flipud(gridy), [n_x * n_y, 1])

        # Only one depth range
        idx = np.logical_and(catalogue.data['depth'] >= z_bins[0],
                             catalogue.data['depth'] < z_bins[1])
        mid_depth = (z_bins[0] + z_bins[1]) / 2.

        data_grid = np.column_stack([
            gridx, gridy, mid_depth * np.ones(n_x * n_y, dtype=float),
            self.create_2D_grid_simple(catalogue.data['longitude'][idx],
                                       catalogue.data['latitude'][idx],
                                       catalogue.data['year'][idx],
                                       catalogue.data['magnitude'][idx],
                                       completeness_table, t_f, mag_inc)
        ])

        if len(z_bins) < 3:
            # Only one depth range
            self.data = data_grid
            return

        # Multiple depth layers - append to grid
        for iloc in range(1, len(z_bins) - 1):
            idx = np.logical_and(catalogue.data['depth'] >= z_bins[iloc],
                                 catalogue.data['depth'] < z_bins[iloc + 1])
            mid_depth = (z_bins[iloc] + z_bins[iloc + 1]) / 2.

            temp_grid = np.column_stack([
                gridx, gridy, mid_depth * np.ones(n_x * n_y, dtype=float),
                self.create_2D_grid_simple(catalogue.data['longitude'][idx],
                                           catalogue.data['latitude'][idx],
                                           catalogue.data['year'][idx],
                                           catalogue.data['magnitude'][idx],
                                           completeness_table, t_f, mag_inc)
            ])

            data_grid = np.vstack([data_grid, temp_grid])
        self.data = data_grid
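
    # Illustrative note: with a single depth layer (len(z_bins) == 2) the
    # result holds ncolx * ncoly rows of [lon, lat, depth, count]; each
    # additional depth layer stacks a further ncolx * ncoly rows.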

    def write_to_csv(self, filename):
        '''
        Exports to simple csv
        :param str filename:
            Path to file for export
        '''
        # Create header list
        header_info = [
            'Longitude', 'Latitude', 'Depth', 'Observed Count',
            'Smoothed Rate', 'b-value'
        ]
        with open(filename, 'w', newline='') as fid:
            writer = csv.DictWriter(fid, fieldnames=header_info)
            # Write the header row, then one row per grid cell
            writer.writeheader()
            for row in self.data:
                writer.writerow({
                    'Longitude': '%.5f' % row[0],
                    'Latitude': '%.5f' % row[1],
                    'Depth': '%.3f' % row[2],
                    'Observed Count': '%.5e' % row[3],
                    'Smoothed Rate': '%.5e' % row[4],
                    'b-value': '%.4f' % self.bval
                })
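
Taken together, a minimal end-to-end sketch under assumed inputs (catalogue and comp_table as in Example #2; the output filename is illustrative):

config = {'Length_Limit': 3., 'BandWidth': 50., 'increment': 0.1}
model = SmoothedSeismicity([-128., -113., 0.2, 30., 43., 0.2, 0., 100., 100.],
                           bvalue=0.8)
model.run_analysis(catalogue, config,
                   completeness_table=comp_table,
                   smoothing_kernel=IsotropicGaussian())
model.write_to_csv('smoothed_rates.csv')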
Example #8
class TestIsotropicGaussian(unittest.TestCase):
    '''
    Simple tests of the Isotropic Gaussian Kernel
    (as implemented by Frankel (1995))
    '''
    def setUp(self):
        self.model = IsotropicGaussian()
        # Set up a simple grid
        [gx, gy] = np.meshgrid(np.arange(35.5, 40., 0.5),
                               np.arange(40.5, 45., 0.5))
        ngp = np.shape(gx)[0] * np.shape(gx)[1]
        gx = np.reshape(gx, [ngp, 1])
        gy = np.reshape(gy, [ngp, 1])
        depths = 10. * np.ones(ngp)
        self.data = np.column_stack(
            [gx, gy, depths, np.zeros(ngp, dtype=float)])

    def test_kernel_single_event(self):
        '''
        Tests to ensure kernel is smoothing values correctly for a single
        event
        '''
        self.data[50, 3] = 1.
        config = {'Length_Limit': 3.0, 'BandWidth': 30.0}
        expected_array = np.genfromtxt(
            os.path.join(BASE_PATH, TEST_1_VALUE_FILE))
        (smoothed_array, sum_data, sum_smooth) = \
            self.model.smooth_data(self.data, config)
        np.testing.assert_array_almost_equal(expected_array, smoothed_array)
        self.assertAlmostEqual(sum_data, 1.)
        # Assert that sum of the smoothing is equal to the sum of the
        # data values to 3 dp
        self.assertAlmostEqual(sum_data, sum_smooth, 3)

    def test_kernel_multiple_event(self):
        '''
        Tests to ensure kernel is smoothing values correctly for multiple
        events
        '''
        self.data[[5, 30, 65], 3] = 1.
        config = {'Length_Limit': 3.0, 'BandWidth': 30.0}
        expected_array = np.genfromtxt(
            os.path.join(BASE_PATH, TEST_3_VALUE_FILE))
        (smoothed_array, sum_data, sum_smooth) = \
            self.model.smooth_data(self.data, config)
        np.testing.assert_array_almost_equal(expected_array, smoothed_array)
        self.assertAlmostEqual(sum_data, 3.)
        # Assert that sum of the smoothing is equal to the sum of the
        # data values to 2 dp
        self.assertAlmostEqual(sum_data, sum_smooth, 2)

    def test_kernel_single_event_3d(self):
        '''
        Tests to ensure kernel is smoothing values correctly for a single
        event
        '''
        self.data[50, 3] = 1.
        self.data[50, 2] = 20.
        config = {'Length_Limit': 3.0, 'BandWidth': 30.0}
        expected_array = np.genfromtxt(
            os.path.join(BASE_PATH, TEST_1_VALUE_3D_FILE))
        (smoothed_array, sum_data, sum_smooth) = \
            self.model.smooth_data(self.data, config, is_3d=True)
        np.testing.assert_array_almost_equal(expected_array, smoothed_array)
        self.assertAlmostEqual(sum_data, 1.)
        # Assert that sum of the smoothing is equal to the sum of the
        # data values to 2 dp
        self.assertAlmostEqual(sum_data, sum_smooth, 2)
Example #9
    def run_analysis(self, catalogue, config, completeness_table=None,
                     smoothing_kernel=None, end_year=None):
        '''
        Runs an analysis of smoothed seismicity in the manner
        originally implemented by Frankel (1995)

        :param catalogue: 
            Instance of the hmtk.seismicity.catalogue.Catalogue class
            catalogue.data dictionary containing the following - 
            'year' - numpy.ndarray vector of years
            'longitude' - numpy.ndarray vector of longitudes
            'latitude' - numpy.ndarray vector of latitudes
            'depth' - numpy.ndarray vector of depths
        
        :param dict config:
            Configuration settings of the algorithm:
            * 'Length_Limit' - Maximum number of bandwidths for use in 
                               smoothing (Float)
            * 'BandWidth' - Bandwidth (km) of the Smoothing Kernel (Float)
            * 'increment' - Output incremental (True) or cumulative a-value
                            (False)

        :param np.ndarray completeness_table: 
            Completeness of the catalogue assuming evenly spaced magnitudes 
            from most recent bin to oldest bin [year, magnitude]

        :param smoothing_kernel:
            Smoothing kernel as instance of
            :class:`hmtk.seismicity.smoothing.kernels.base.BaseSmoothingKernel`

        :param float end_year:
            Year considered as the final year for the analysis. If not given
            the program will automatically take the last year in the catalogue.

        :returns:
            Full smoothed seismicity data as np.ndarray, of the form 
            [Longitude, Latitude, Depth, Observed, Smoothed]
        '''
        
        self.catalogue = catalogue
        if smoothing_kernel:
            self.kernel = smoothing_kernel
        else:
            self.kernel = IsotropicGaussian()
        

        # If no grid limits are specified then take from catalogue
        if not isinstance(self.grid_limits, dict):
            self.get_grid_from_catalogue(config)

        completeness_table, mag_inc = utils.get_even_magnitude_completeness(
            completeness_table,
            self.catalogue)
        
        if not end_year:
            end_year = np.max(self.catalogue.data['year'])

        # Get Weichert factor
        t_f, _ = utils.get_weichert_factor(self.beta,
                                           completeness_table[:, 1],
                                           completeness_table[:, 0],
                                           end_year)
        # Get the grid
        self.create_3D_grid(self.catalogue, completeness_table, t_f, mag_inc)
        if config['increment']:
            # Get Hermann adjustment factors
            fval, fival = utils.hermann_adjustment_factors(
                self.bval, completeness_table[0, 1], config['increment'])
            self.data[:, -1] = fval * fival * self.data[:, -1]

        # Apply smoothing
        smoothed_data, sum_data, sum_smooth = self.kernel.smooth_data(
            self.data, config, self.use_3d)
        print('Smoothing Total Rate Comparison - '
              'Observed: %.6e, Smoothed: %.6e' % (sum_data, sum_smooth))
        self.data = np.column_stack([self.data, smoothed_data])
        return self.data
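
A short usage sketch of this end_year variant (model, catalogue, config and comp_table assumed as in the earlier examples):

output = model.run_analysis(catalogue, config,
                            completeness_table=comp_table,
                            smoothing_kernel=IsotropicGaussian(),
                            end_year=2006)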
Example #10
class SmoothedSeismicity(object):
    '''
    Class to implement an analysis of Smoothed Seismicity, including the 
    grid counting of data and the smoothing.

    :param np.ndarray grid:
        Observed count in each cell [Long., Lat., Depth., Count]

    :param catalogue:
        Valid instance of the :class:`hmtk.seismicity.catalogue.Catalogue`

    :param bool use_3d:
        Decide if analysis is 2-D (False) or 3-D (True). If 3-D then distances
        will use hypocentral distance, otherwise epicentral distance

    :param float bval:
        b-value

    :param float beta:
        Beta value for exponential form (beta = bval * log(10.))

    :param np.ndarray data:
        Smoothed seismicity output

    :param dict grid_limits:
        Limits of the grid used for defining the cells
    '''

    def __init__(self, grid_limits, use_3d=False, bvalue=None):
        '''
        Instantiate the class with a set of grid limits

        :param grid_limits:
            Limits of the grid as
            [xmin, xmax, spcx, ymin, ymax, spcy, zmin, zmax, spcz],
            or a float giving only the grid spacing (in degrees), in which
            case the limits are later derived from the catalogue

        :param bool use_3d:
            Choose whether to use hypocentral distances for smoothing or
            only epicentral

        :param float bvalue:
            b-value for analysis
        '''
        self.grid = None
        self.catalogue = None
        self.use_3d = use_3d
        self.bval = bvalue
        if self.bval:
            self.beta = self.bval * log(10.)
        else:
            self.beta = None
        self.data = None

        if isinstance(grid_limits, list) and len(grid_limits) == 9:
            self.grid_limits = {'xmin': grid_limits[0],
                                'xmax': grid_limits[1],
                                'xspc': grid_limits[2],
                                'ymin': grid_limits[3],
                                'ymax': grid_limits[4],
                                'yspc': grid_limits[5],
                                'zmin': grid_limits[6],
                                'zmax': grid_limits[7],
                                'zspc': grid_limits[8]}

            assert self.grid_limits['xmax'] >= self.grid_limits['xmin']
            assert self.grid_limits['xspc'] > 0.0
            assert self.grid_limits['ymax'] >= self.grid_limits['ymin']
            assert self.grid_limits['yspc'] > 0.0
        elif isinstance(grid_limits, float):
            # Only the spacing (in degrees) is entered
            self.grid_limits = grid_limits
        else:
            self.grid_limits = None
        self.kernel = None


    def run_analysis(self, catalogue, config, completeness_table=None,
                     smoothing_kernel=None, end_year=None):
        '''
        Runs an analysis of smoothed seismicity in the manner
        originally implemented by Frankel (1995)

        :param catalogue: 
            Instance of the hmtk.seismicity.catalogue.Catalogue class
            catalogue.data dictionary containing the following - 
            'year' - numpy.ndarray vector of years
            'longitude' - numpy.ndarray vector of longitudes
            'latitude' - numpy.ndarray vector of latitudes
            'depth' - numpy.ndarray vector of depths
        
        :param dict config:
            Configuration settings of the algorithm:
            * 'Length_Limit' - Maximum number of bandwidths for use in 
                               smoothing (Float)
            * 'BandWidth' - Bandwidth (km) of the Smoothing Kernel (Float)
            * 'increment' - Output incremental (True) or cumulative a-value
                            (False)

        :param np.ndarray completeness_table: 
            Completeness of the catalogue assuming evenly spaced magnitudes 
            from most recent bin to oldest bin [year, magnitude]

        :param smoothing_kernel:
            Smoothing kernel as instance of
            :class:`hmtk.seismicity.smoothing.kernels.base.BaseSmoothingKernel`

        :param float end_year:
            Year considered as the final year for the analysis. If not given
            the program will automatically take the last year in the catalogue.

        :returns:
            Full smoothed seismicity data as np.ndarray, of the form 
            [Longitude, Latitude, Depth, Observed, Smoothed]
        '''
        
        self.catalogue = catalogue
        if smoothing_kernel:
            self.kernel = smoothing_kernel
        else:
            self.kernel = IsotropicGaussian()
        

        # If no grid limits are specified then take from catalogue
        if not isinstance(self.grid_limits, dict):
            self.get_grid_from_catalogue(config)

        completeness_table, mag_inc = utils.get_even_magnitude_completeness(
            completeness_table,
            self.catalogue)
        
        if not end_year:
            end_year = np.max(self.catalogue.data['year'])

        # Get Weichert factor
        t_f, _ = utils.get_weichert_factor(self.beta,
                                           completeness_table[:, 1],
                                           completeness_table[:, 0],
                                           end_year)
        # Get the grid
        self.create_3D_grid(self.catalogue, completeness_table, t_f, mag_inc)
        if config['increment']:
            # Get Hermann adjustment factors
            fval, fival = utils.hermann_adjustment_factors(
                self.bval, completeness_table[0, 1], config['increment'])
            self.data[:, -1] = fval * fival * self.data[:, -1]

        # Apply smoothing
        smoothed_data, sum_data, sum_smooth = self.kernel.smooth_data(
            self.data, config, self.use_3d)
        print('Smoothing Total Rate Comparison - '
              'Observed: %.6e, Smoothed: %.6e' % (sum_data, sum_smooth))
        self.data = np.column_stack([self.data, smoothed_data])
        return self.data

    def create_2D_grid_simple(self, longitude, latitude, year, magnitude, 
                              completeness_table, t_f=1., mag_inc=0.1):
        '''
        Generates the grid from the limits using an approach closer to that of 
        Frankel (1995)
        :param numpy.ndarray longitude:
            Vector of earthquake longitudes
        
        :param numpy.ndarray latitude:
            Vector of earthquake latitudes

        :param numpy.ndarray year:
            Vector of earthquake years
        
        :param numpy.ndarray magnitude:
            Vector of earthquake magnitudes

        :param numpy.ndarray completeness_table:
            Completeness table
        
        :param float t_f:
            Weichert adjustment factor

        :param float mag_inc:
            Increment of the completeness magnitude

        :returns:
           Two-dimensional spatial grid of observed rates
        '''
        assert mag_inc > 0.

        xlim = (self.grid_limits['xmax'] - self.grid_limits['xmin']) /\
            self.grid_limits['xspc']
        ylim = (self.grid_limits['ymax'] - self.grid_limits['ymin']) /\
            self.grid_limits['yspc']
        ncolx = int(xlim)
        ncoly = int(ylim)
        grid_count = np.zeros(ncolx * ncoly, dtype=float)
        for iloc in range(0, len(longitude)):
            dlon = (longitude[iloc] - self.grid_limits['xmin']) /\
                self.grid_limits['xspc']
            if (dlon < 0.) or (dlon > xlim):
                # Earthquake outside longitude limits
                continue
            xcol = int(dlon)
            if xcol == ncolx:
                # If longitude is directly on upper grid line then retain
                xcol = ncolx - 1
            dlat = fabs(self.grid_limits['ymax'] - latitude[iloc]) /\
                self.grid_limits['yspc']
            if (dlat < 0.) or (dlat > ylim):
                # Earthquake outside latitude limits
                continue
            ycol = int(dlat) # Correct for floating precision
            if ycol == ncoly: 
                # If latitude is directly on upper grid line then retain
                ycol = ncoly - 1
            kmarker = (ycol * int(xlim)) + xcol
            adjust = _get_adjustment(magnitude[iloc],  
                                     year[iloc], 
                                     completeness_table[0, 1],
                                     completeness_table[:, 0],
                                     t_f, 
                                     mag_inc)
            if adjust:
                grid_count[kmarker] = grid_count[kmarker] + adjust
        return grid_count
    

    def create_3D_grid(self, catalogue, completeness_table, t_f=1.0, 
                       mag_inc=0.1):
        '''
        Counts the earthquakes observed in a three dimensional grid


        :param catalogue: 
            Instance of the hmtk.seismicity.catalogue.Catalogue class
            catalogue.data dictionary containing the following - 
            'year' - numpy.ndarray vector of years
            'longitude' - numpy.ndarray vector of longitudes
            'latitude' - numpy.ndarray vector of latitudes
            'depth' - numpy.ndarray vector of depths

        :param np.ndarray completeness_table: 
            Completeness of the catalogue assuming evenly spaced magnitudes 
            from most recent bin to oldest bin [year, magnitude]
    
        :param float t_f:
            Weichert adjustment factor
        
        :param float mag_inc:
            Increment of the completeness magnitude (default 0.1)

        :returns:
           Three-dimensional spatial grid of observed rates (or two dimensional
           if only one depth layer is considered)
            
        '''
        x_bins = np.arange(self.grid_limits['xmin'], 
                           self.grid_limits['xmax'], 
                           self.grid_limits['xspc'])
        if x_bins[-1] < self.grid_limits['xmax']:
            x_bins = np.hstack([x_bins, x_bins[-1] + self.grid_limits['xspc']])
            self.grid_limits['xmax'] = np.round(x_bins[-1], 5)
        
        y_bins = np.arange(self.grid_limits['ymin'], 
                           self.grid_limits['ymax'], 
                           self.grid_limits['yspc'])
        if y_bins[-1] < self.grid_limits['ymax']:
            y_bins = np.hstack([y_bins, y_bins[-1] + self.grid_limits['yspc']])
            self.grid_limits['ymax'] = np.round(y_bins[-1], 5)


        z_bins = np.arange(self.grid_limits['zmin'], 
                           self.grid_limits['zmax'] + self.grid_limits['zspc'], 
                           self.grid_limits['zspc'])

        if z_bins[-1] < self.grid_limits['zmax']:
            z_bins = np.hstack([z_bins, z_bins[-1] + self.grid_limits['zspc']])
            self.grid_limits['zmax'] = np.round(z_bins[-1], 5)
        # Define centre points of grid cells
        gridx, gridy = np.meshgrid((x_bins[1:] + x_bins[:-1]) / 2.,
                                   (y_bins[1:] + y_bins[:-1]) / 2.)

        n_x, n_y = np.shape(gridx)
        gridx = np.reshape(gridx, [n_x * n_y, 1])
        gridy = np.reshape(np.flipud(gridy), [n_x * n_y, 1])

        # Only one depth range
        idx = np.logical_and(catalogue.data['depth'] >= z_bins[0],
                             catalogue.data['depth'] < z_bins[1])
        mid_depth = (z_bins[0] + z_bins[1]) / 2.
        
        data_grid = np.column_stack([
            gridx, 
            gridy,
            mid_depth * np.ones(n_x * n_y, dtype=float),
            self.create_2D_grid_simple(catalogue.data['longitude'][idx],
                                       catalogue.data['latitude'][idx],
                                       catalogue.data['year'][idx],
                                       catalogue.data['magnitude'][idx],
                                       completeness_table,
                                       t_f,
                                       mag_inc)])
        
        if len(z_bins) < 3:
            # Only one depth range
            self.data = data_grid
            return
        
        # Multiple depth layers - append to grid
        for iloc in range(1, len(z_bins) - 1):
            idx = np.logical_and(catalogue.data['depth'] >= z_bins[iloc],
                                 catalogue.data['depth'] < z_bins[iloc + 1])
            mid_depth = (z_bins[iloc] + z_bins[iloc + 1]) / 2.

            temp_grid = np.column_stack([
                gridx, 
                gridy, 
                mid_depth * np.ones(n_x * n_y, dtype=float),
                self.create_2D_grid_simple(catalogue.data['longitude'][idx],
                                           catalogue.data['latitude'][idx],
                                           catalogue.data['year'][idx],
                                           catalogue.data['magnitude'][idx],
                                           completeness_table,
                                           t_f,
                                           mag_inc)])
            
            data_grid = np.vstack([data_grid, temp_grid])
        self.data = data_grid
            
    def get_grid_from_catalogue(self, config):
        '''
        Defines the grid on the basis of the catalogue
        '''
        # Get catalogue bounding box
        cat_bbox = get_catalogue_bounding_polygon(self.catalogue)
        # Dilate polygon by bandwidth * length_limit
        cat_bbox = cat_bbox.dilate(config['BandWidth'] * 
                                   config['Length_Limit'])
        # Define Grid spacing
        self.grid_limits = {'xmin': np.min(cat_bbox.lons),
                            'xmax': np.max(cat_bbox.lons),
                            'xspc': self.grid_limits,
                            'ymin': np.min(cat_bbox.lats),
                            'ymax': np.max(cat_bbox.lats),
                            'yspc': self.grid_limits}
        if self.use_3d:
            max_depth = np.max(self.catalogue.data['depth'])
            self.grid_limits['zmin'] = 0.
            self.grid_limits['zmax'] = max_depth + 1E-5
            self.grid_limits['zspc'] = max_depth
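
    # Illustrative note: this branch is reached when the class was built
    # with only a float spacing, e.g. SmoothedSeismicity(0.5); the
    # catalogue bounding box, dilated by BandWidth * Length_Limit (km),
    # supplies xmin/xmax/ymin/ymax and the float becomes xspc and yspc.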

    def write_to_csv(self, filename):
        '''
        Exports to simple csv
        :param str filename:
            Path to file for export
        '''
        # Create header list
        header_info = ['Longitude', 'Latitude', 'Depth', 'Observed Count',
                       'Smoothed Rate', 'b-value']
        with open(filename, 'w', newline='') as fid:
            writer = csv.DictWriter(fid, fieldnames=header_info)
            # Write the header row, then one row per grid cell
            writer.writeheader()
            for row in self.data:
                writer.writerow({'Longitude': '%.5f' % row[0],
                                 'Latitude': '%.5f' % row[1],
                                 'Depth': '%.3f' % row[2],
                                 'Observed Count': '%.5e' % row[3],
                                 'Smoothed Rate': '%.5e' % row[4],
                                 'b-value': '%.4f' % self.bval})