def smooth_data(self, data, config, is_3d=False):
        '''
        Applies the smoothing kernel to the data
        :param np.ndarray data:
            Raw earthquake count in the form [Longitude, Latitude, Depth,
                Count]
        :param dict config:
            Configuration parameters must contain:
            * BandWidth: The bandwidth of the kernel (in km) (float)
            * Length_Limit: Maximum number of standard deviations

        :returns:
            * smoothed_value: np.ndarray vector of smoothed values
            * Total (summed) rate of the original values
            * Total (summed) rate of the smoothed values
        '''
        max_dist = config['Length_Limit'] * config['BandWidth']
        smoothed_value = np.zeros(len(data), dtype=float)
        for iloc in range(len(data)):
            # Epicentral distance (km) from the current node to every node
            dist_val = haversine(data[:, 0], data[:, 1], data[iloc, 0],
                                 data[iloc, 1])

            if is_3d:
                # Convert to hypocentral distance using the depth column
                dist_val = np.sqrt(dist_val.flatten() ** 2.0 +
                                   (data[:, 2] - data[iloc, 2]) ** 2.0)
            # Consider only nodes within Length_Limit bandwidths
            id0 = np.where(dist_val <= max_dist)[0]
            # Gaussian kernel weights
            w_val = (np.exp(-(dist_val[id0] ** 2.0) /
                            (config['BandWidth'] ** 2.))).flatten()
            smoothed_value[iloc] = np.sum(w_val * data[id0, 3]) / np.sum(w_val)
        return smoothed_value, np.sum(data[:, -1]), np.sum(smoothed_value)
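A minimal usage sketch, assuming this method belongs to the IsotropicGaussian kernel class in hmtk (the import path and the synthetic grid below are assumptions, not part of the original source):

import numpy as np
from hmtk.seismicity.smoothing.kernels.isotropic_gaussian import (
    IsotropicGaussian)

# Synthetic 1-degree grid of [longitude, latitude, depth, count] rows
lons, lats = np.meshgrid(np.arange(20., 25., 1.), np.arange(40., 45., 1.))
counts = np.random.poisson(5., lons.size).astype(float)
data = np.column_stack([lons.ravel(), lats.ravel(),
                        np.zeros(lons.size), counts])

config = {'BandWidth': 50.0,     # kernel bandwidth in km
          'Length_Limit': 3.0}   # truncate the kernel at 3 bandwidths
smoothed, sum_obs, sum_smooth = IsotropicGaussian().smooth_data(data, config)
print(sum_obs, sum_smooth)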
Example #4
    def test_haversine(self):
        '''Tests the function utils.haversine
        Distances tested against: i) a Matlab implementation of the
                                     haversine formula;
                                  ii) the Matlab "distance" function (also
                                      based on the haversine formula, but
                                      assuming an Earth radius of 6371.0 km
                                      rather than the 6371.227 km used here)
        '''
        # Simple test
        self.longitude = np.arange(30., 40., 1.)
        self.latitude = np.arange(30., 40., 1.)
        distance = utils.haversine(self.longitude, self.latitude, 35.0, 35.0)
        expected_distance = np.array([[727.09474718],
                                      [580.39194024],
                                      [434.3102452],
                                      [288.87035021],
                                      [144.09319874],
                                      [0.],
                                      [143.38776088],
                                      [286.04831311],
                                      [427.95959077],
                                      [569.09922383]])
        self.assertTrue(np.allclose(distance, expected_distance))

        # 2-D test
        self.longitude = np.array([30., 35., 40.])
        self.latitude = np.array([30., 35., 40.])
        distance = utils.haversine(self.longitude, self.latitude,
                                   self.longitude, self.latitude)
        expected_distance = np.array([[ 0., 727.09474718, 1435.38402047],
                                      [727.09474718, 0., 709.44452948],
                                      [1435.38402047, 709.44452948, 0.]])
        self.assertTrue(np.allclose(distance, expected_distance))
        # Crossing International Dateline
        self.longitude = np.array([179.5, 180.0, -179.5])
        self.latitude = np.array([45., 45., 45.])
        distance = utils.haversine(self.longitude, self.latitude, 179.9, 45.)
        expected_distance = np.array([[31.45176332],
                                      [7.86294832],
                                      [47.1775851]])
        self.assertTrue(np.allclose(distance, expected_distance))
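For reference, a standalone sketch of the great-circle formula these tests exercise (not hmtk's own implementation), using the 6371.227 km Earth radius noted in the docstring. The squared-sine terms make the formula insensitive to the dateline wrap, consistent with the third test:

import numpy as np

def haversine_sketch(lon1, lat1, lon2, lat2, radius=6371.227):
    # Inputs in degrees, output in km
    lon1, lat1, lon2, lat2 = map(np.radians, (lon1, lat1, lon2, lat2))
    aval = (np.sin((lat2 - lat1) / 2.) ** 2. +
            np.cos(lat1) * np.cos(lat2) * np.sin((lon2 - lon1) / 2.) ** 2.)
    return 2. * radius * np.arcsin(np.sqrt(aval))

# ~727.09 km, matching the first expected value above
print(haversine_sketch(30., 30., 35., 35.))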
Example #5
    def _get_bandwidth_data(self, magnitude_bin=0.5):
        # TODO: use the completeness-corrected catalogue here

        from hmtk.seismicity.utils import haversine

        # get data
        X = self.catalogue.data['magnitude']

        if not self.config['min_magnitude']:
            min_magnitude = min(X)
        else:
            min_magnitude = self.config['min_magnitude']

        if not self.config['max_magnitude']:
            max_magnitude = max(X)
        else:
            max_magnitude = self.config['max_magnitude']

        # divide the magnitude range into bins
        bins = np.arange(min_magnitude, max_magnitude + magnitude_bin,
                         magnitude_bin)

        h, m = [], []
        for b in bins:
            _i = np.logical_and(X > b, X < b + magnitude_bin)
            # at least two events are needed for a nearest-neighbour distance
            if len(X[_i]) > 1:
                # pairwise distances between all events in the bin
                d = haversine(self.catalogue.data['longitude'][_i],
                              self.catalogue.data['latitude'][_i],
                              self.catalogue.data['longitude'][_i],
                              self.catalogue.data['latitude'][_i])

                # mean nearest-neighbour distance (column 0 of the sorted
                # matrix is the zero self-distance) and the bin centre
                m.append(b + magnitude_bin / 2.)
                h.append(np.sort(d)[:, 1].mean())

        return {'distance': h,
                'magnitude': m}
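The distance-magnitude pairs returned here are typically used to fit a magnitude-dependent bandwidth of the form H(m) = c * exp(d * m), as in Woo (1996); a minimal sketch of that fit (the helper name fit_bandwidth is hypothetical; c and d correspond to the self.c / self.d used in Example #11):

import numpy as np

def fit_bandwidth(distance, magnitude):
    # Least-squares fit of log h = log c + d * m, i.e. H(m) = c * exp(d * m)
    d, log_c = np.polyfit(np.asarray(magnitude),
                          np.log(np.asarray(distance)), 1)
    return np.exp(log_c), d

# data = self._get_bandwidth_data()
# c, d = fit_bandwidth(data['distance'], data['magnitude'])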
Example #6
    def decluster(self, catalogue, config):
        """
        catalogue_matrix, window_opt=TDW_GARDNERKNOPOFF, time_window=60.):

        :param catalogue: a catalogue object
        :type catalogue: Instance of the hmtk.seismicity.catalogue.Catalogue()
                         class
        :keyword window_opt: method used in calculating distance and time
            windows
        :type window_opt: string
        :keyword time_window: Length (in days) of moving time window
        :type time_window: positive float
        :returns: **vcl vector** indicating cluster number,
                  **flagvector** indicating which earthquakes belong to a
                  cluster
        :rtype: numpy.ndarray
        """
        # Convert time window from days to decimal years
        time_window = config['time_window'] / 365.
        # Pre-processing steps are the same as for Gardner & Knopoff
        # Get relevent parameters
        mag = catalogue.data['magnitude']
        neq = np.shape(mag)[0]  # Number of earthquakes
        # Get decimal year (needed for time windows)
        year_dec = decimal_year(catalogue.data['year'],
                                catalogue.data['month'],
                                catalogue.data['day'])
        # Get space windows corresponding to each event
        sw_space, _ = (
            config['time_distance_window'].calc(catalogue.data['magnitude']))

        # Pre-allocate cluster index vectors
        vcl = np.zeros(neq, dtype=int)
        flagvector = np.zeros(neq, dtype=int)
        # Rank magnitudes into descending order
        id0 = np.flipud(np.argsort(mag, kind='heapsort'))

        clust_index = 0
        for imarker in id0:
            # Earthquake not allocated to cluster - perform calculation
            if vcl[imarker] == 0:
                # Perform distance calculation
                mdist = haversine(
                    catalogue.data['longitude'],
                    catalogue.data['latitude'],
                    catalogue.data['longitude'][imarker],
                    catalogue.data['latitude'][imarker]).flatten()

                # Select earthquakes inside distance window, later than
                # mainshock and not already assigned to a cluster
                vsel1 = np.where(
                    np.logical_and(vcl == 0,
                        np.logical_and(mdist <= sw_space[imarker],
                                       year_dec > year_dec[imarker])))[0]
                has_aftershocks = False
                if len(vsel1) > 0:
                    # Earthquakes after event inside distance window
                    temp_vsel1, has_aftershocks = self._find_aftershocks(
                        vsel1,
                        year_dec,
                        time_window,
                        imarker,
                        neq)
                    if has_aftershocks:
                        flagvector[temp_vsel1] = 1
                        vcl[temp_vsel1] = clust_index + 1

                # Select earthquakes inside distance window, earlier than
                # mainshock and not already assigned to a cluster
                has_foreshocks = False
                vsel2 = np.where(
                    np.logical_and(vcl == 0,
                        np.logical_and(mdist <= sw_space[imarker],
                                       year_dec < year_dec[imarker])))[0]
                if len(vsel2) > 0:
                    # Earthquakes before event inside distance window
                    temp_vsel2, has_foreshocks = self._find_foreshocks(
                        vsel2,
                        year_dec,
                        time_window,
                        imarker,
                        neq)
                    if has_foreshocks:
                        flagvector[temp_vsel2] = -1
                        vcl[temp_vsel2] = clust_index + 1

                if has_aftershocks or has_foreshocks:
                    # Assign mainshock to cluster
                    vcl[imarker] = clust_index + 1
                    clust_index += 1

        return vcl, flagvector
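A hedged usage sketch for this method, which matches hmtk's Afteran declusterer (class and import paths are assumptions based on the hmtk layout; the input file name is a placeholder):

from hmtk.parsers.catalogue.csv_catalogue_parser import CsvCatalogueParser
from hmtk.seismicity.declusterer.dec_afteran import Afteran
from hmtk.seismicity.declusterer.distance_time_windows import (
    GardnerKnopoffWindow)

catalogue = CsvCatalogueParser('catalogue.csv').read_file()  # assumed input
config = {'time_distance_window': GardnerKnopoffWindow(),
          'time_window': 60.0}  # moving window length in days
vcl, flagvector = Afteran().decluster(catalogue, config)
# vcl > 0 gives the cluster id; flagvector: 1 = aftershock, -1 = foreshock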
Example #7
    def decluster(self, catalogue, config):
        """
        The configuration of this declustering algorithm requires two
        objects:
        - A time-distance window object (key is 'time_distance_window')
        - A value in the interval [0,1] expressing the fraction of the
        time window used for aftershocks (key is 'fs_time_prop')

        :param catalogue:
            Catalogue of earthquakes
        :type catalogue: Dictionary
        :param config:
            Configuration parameters
        :type config: Dictionary

        :returns:
          **vcl vector** indicating cluster number,
          **flagvector** indicating which eq events belong to a cluster
        :rtype: numpy.ndarray
        """
        # Get relevant parameters
        neq = len(catalogue.data['magnitude'])  # Number of earthquakes
        # Get decimal year (needed for time windows)
        year_dec = decimal_year(catalogue.data['year'],
                                catalogue.data['month'], catalogue.data['day'])
        # Get space and time windows corresponding to each event
        sw_space, sw_time = (config['time_distance_window'].calc(
            catalogue.data['magnitude']))
        # Initial Position Identifier
        eqid = np.arange(0, neq, 1)
        # Pre-allocate cluster index vectors
        vcl = np.zeros(neq, dtype=int)
        # Sort magnitudes into descending order
        id0 = np.flipud(
            np.argsort(catalogue.data['magnitude'], kind='heapsort'))
        longitude = catalogue.data['longitude'][id0]
        latitude = catalogue.data['latitude'][id0]
        sw_space = sw_space[id0]
        sw_time = sw_time[id0]
        year_dec = year_dec[id0]
        eqid = eqid[id0]
        flagvector = np.zeros(neq, dtype=int)
        # Begin cluster identification
        clust_index = 0
        for i in range(0, neq - 1):
            if vcl[i] == 0:
                # Find Events inside both fore- and aftershock time windows
                dt = year_dec - year_dec[i]
                vsel = np.logical_and(
                    vcl == 0,
                    np.logical_and(
                        dt >= (-sw_time[i] * config['fs_time_prop']),
                        dt <= sw_time[i]))
                # Of those events inside time window,
                # find those inside distance window
                vsel1 = haversine(longitude[vsel], latitude[vsel],
                                  longitude[i], latitude[i]) <= sw_space[i]
                vsel[vsel] = vsel1
                temp_vsel = np.copy(vsel)
                temp_vsel[i] = False
                if any(temp_vsel):
                    # Allocate a cluster number
                    vcl[vsel] = clust_index + 1
                    flagvector[vsel] = 1
                    # For those events in the cluster before the main event,
                    # flagvector is equal to -1
                    temp_vsel[dt >= 0.0] = False
                    flagvector[temp_vsel] = -1
                    flagvector[i] = 0
                    clust_index += 1

        # Re-sort the catalog_matrix into original order
        id1 = np.argsort(eqid, kind='heapsort')
        eqid = eqid[id1]
        vcl = vcl[id1]
        flagvector = flagvector[id1]

        return vcl, flagvector
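A hedged usage sketch, assuming this is hmtk's GardnerKnopoffType1 declusterer (import paths and file name are assumptions; fs_time_prop scales the foreshock window as a fraction of the aftershock one):

from hmtk.parsers.catalogue.csv_catalogue_parser import CsvCatalogueParser
from hmtk.seismicity.declusterer.dec_gardner_knopoff import (
    GardnerKnopoffType1)
from hmtk.seismicity.declusterer.distance_time_windows import (
    GardnerKnopoffWindow)

catalogue = CsvCatalogueParser('catalogue.csv').read_file()  # assumed input
config = {'time_distance_window': GardnerKnopoffWindow(),
          'fs_time_prop': 1.0}  # foreshock window = full aftershock window
vcl, flagvector = GardnerKnopoffType1().decluster(catalogue, config)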
Example #9
# imports assumed by this excerpt (paths as used elsewhere in this listing)
import datetime as dt
import numpy as np
from hmtk.seismicity.utils import haversine


def flag_dependent_events(catalogue, flagvector, doAftershocks, method):
    '''
    catalogue: dictionary of earthquakes in HMTK catalogue format,
               parsed using CsvCatalogueParser
    flagvector: integer vector with the length of the catalogue
    doAftershocks:
        if True: flag aftershocks
        if False: flag foreshocks
    method: either "Leonard08" or "Stein08"
    '''

    # get number of events
    neq = len(catalogue.data['magnitude'])

    # set periods of confidence
    test_day_1 = dt.datetime(
        1960, 1, 1
    )  # Earthquakes older than this are assumed to be very poorly located.
    test_day_2 = dt.datetime(
        1970, 1,
        1)  # Earthquakes older than this are assumed to be poorly located.

    # set delta-magnitude threshold: dependent events must have magnitude
    # less than 0.989 times that of the mainshock (~95% of the moment)
    delta_mag = 0.989

    # set time window
    if method == 'Leonard08':
        max_time_foreshock = 10**((catalogue.data['magnitude'] - 1.85) * 0.69)
        max_time_aftershock = 10**(
            (catalogue.data['magnitude'] - 2.70) * 1.1) + 4.0
    elif method == 'Stein08':
        max_time_foreshock = 10**(
            (catalogue.data['magnitude'] - 2.70) * 1.1) + 4.0
        max_time_aftershock = 10**(
            (catalogue.data['magnitude'] - 2.70) * 1.1) + 4.0
    else:
        raise ValueError('method must be "Leonard08" or "Stein08"')

    # get event time datevector
    evdate = []
    for i in range(0, neq):

        # get event datetime
        evdate.append(dt.datetime(catalogue.data['year'][i], \
                                  catalogue.data['month'][i], \
                                  catalogue.data['day'][i]))
    evdate = np.array(evdate)

    if doAftershocks:
        print('Flagging aftershocks...')
    else:
        print('Flagging foreshocks...')

    # loop through earthquakes
    for i in range(0, neq):

        # set maximum distance window
        max_dist = 10**((catalogue.data['magnitude'][i]-4.317)*0.6) \
                         + 17.0/np.sqrt(catalogue.data['magnitude'][i])

        # set time-dependent distance cut-off
        if (evdate[i] <= test_day_1):
            max_dist = max_dist + 5.0
        elif (evdate[i] <= test_day_2):
            max_dist = max_dist + 10.0

        #########################################################################
        # flag aftershocks
        #########################################################################

        if doAftershocks:

            # for subsequent earthquakes, check distance from current event
            inter_evdist = haversine(catalogue.data['longitude'][i + 1:],
                                     catalogue.data['latitude'][i + 1:],
                                     catalogue.data['longitude'][i],
                                     catalogue.data['latitude'][i])

            # flatten distance array
            inter_evdist = inter_evdist.flatten()

            # get inter-event time in days as a numpy array
            inter_evtime = evdate[i + 1:] - evdate[i]
            inter_evdays = np.array([t.days for t in inter_evtime])

            # get interevent magnitude
            inter_evmag = delta_mag*catalogue.data['magnitude'][i] \
                          - catalogue.data['magnitude'][i+1:]

            # now find aftershocks to flag
            idx = np.where((inter_evdist < max_dist) & (inter_evdays < max_time_aftershock[i]) \
                            & (inter_evmag > 0.0))[0]

            # set aftershock flag
            flagvector[i + 1 + idx] = 1

        #########################################################################
        # flag foreshocks
        #########################################################################

        else:

            # for earlier earthquakes, check distance from current event
            inter_evdist = haversine(catalogue.data['longitude'][0:i],
                                     catalogue.data['latitude'][0:i],
                                     catalogue.data['longitude'][i],
                                     catalogue.data['latitude'][i])

            # flatten distance array
            inter_evdist = inter_evdist.flatten()

            # get inter-event time in days as a numpy array
            inter_evtime = evdate[i] - evdate[0:i]
            inter_evdays = np.array([t.days for t in inter_evtime])

            # get interevent magnitude
            inter_evmag = delta_mag*catalogue.data['magnitude'][i] \
                          - catalogue.data['magnitude'][0:i]

            # now find foreshocks to flag
            idx = np.where((inter_evdist < max_dist) & (inter_evdays < max_time_foreshock[i]) \
                            & (inter_evmag > 0.0))[0]

            # set foreshock flag
            flagvector[idx] = 1

    return flagvector
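A hedged sketch of driving this function over a parsed catalogue (the parser import path and file name are assumptions; both passes accumulate flags in the same vector):

import numpy as np
from hmtk.parsers.catalogue.csv_catalogue_parser import CsvCatalogueParser

catalogue = CsvCatalogueParser('catalogue.csv').read_file()
flagvector = np.zeros(len(catalogue.data['magnitude']), dtype=int)
flagvector = flag_dependent_events(catalogue, flagvector, True, 'Leonard08')
flagvector = flag_dependent_events(catalogue, flagvector, False, 'Leonard08')
# events with flagvector == 1 are flagged as dependent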
Example #11
    def run_analysis(self, catalogue, config, completeness_table=None, smoothing_kernel=IsotropicGaussianWoo):
        '''
        Runs an analysis of smoothed seismicity in the manner
        originally implemented by Frankel (1995)
 
        :param catalogue:
            Instance of the hmtk.seismicity.catalogue.Catalogue class
            catalogue.data dictionary containing the following -
            'year' - numpy.ndarray vector of years
            'longitude' - numpy.ndarray vector of longitudes
            'latitude' - numpy.ndarray vector of latitudes
            'depth' - numpy.ndarray vector of depths
            'magnitude' - numpy.ndarray vector of magnitudes
 
        :param dict config:
            Configuration settings of the algorithm:
            * 'Length_Limit' - Maximum number of bandwidths for use in
                               smoothing (Float)
            * 'BandWidth' - Bandwidth (km) of the Smoothing Kernel (Float)
            * 'increment' - Output incremental (True) or cumulative a-value
                            (False)
 
        :param np.ndarray completeness_table:
            Completeness of the catalogue assuming evenly spaced magnitudes
            from most recent bin to oldest bin [year, magnitude]
 
        :param smoothing_kernel:
            Smoothing kernel as instance of
            :class:`hmtk.seismicity.smoothing.kernels.base.BaseSmoothingKernel`
 
        :returns:
            Full smoothed seismicity data as np.ndarray, of the form
            [Longitude, Latitude, Depth, Observed, Smoothed]
        '''
        self.catalogue = catalogue
        self.completeness_table = completeness_table
        self.config = config
        self.add_bandwith_values()
 
        use3d = config['use3d']
        cells, spacement = self._create_grid(use3d=use3d)
        k = Frankel_1995(self.c, self.d)
        
        ct, dm = utils.get_even_magnitude_completeness(completeness_table, 
                                                       catalogue, 
                                                       magnitude_increment=0.5)

        last_year = catalogue.end_year
                
        # get data
        X = self.catalogue.data['magnitude']
        magnitude_bin = dm
        
        min_magnitude = (self.config['min_magnitude']
                         if self.config['min_magnitude'] else min(X))
        max_magnitude = (self.config['max_magnitude']
                         if self.config['max_magnitude'] else max(X))
        
        # divide bins catalog_bins
        bins = np.arange(min_magnitude, max_magnitude + magnitude_bin,
                         magnitude_bin)

        for b in bins:
            # kernel support limit and bin-centre magnitude
            _h = k.H(b + magnitude_bin / 2.) * self.config['bandwidth_h_limit']
            _m = b + magnitude_bin / 2.

            observation_time = self._get_observation_time(_m, ct, last_year)
  
            fid = open('/Users/pirchiner/Desktop/tmp_woo.%s.csv'%_m, 'wt')
            # Create header list
            header_info = ['Longitude', 'Latitude', 'Depth', 'Magnitude',
                           'Rate']
            writer = csv.DictWriter(fid, fieldnames=header_info)
            headers = dict((name0, name0) for name0 in header_info)
            # Write to file
            writer.writerow(headers)
            
            _i = np.logical_and(X >= _m - magnitude_bin / 2.,
                                X < _m + magnitude_bin / 2.)

            print(_m, observation_time)

            for c in cells:
                x0 = c[0]
                y0 = c[1]
                z0 = 0 if not use3d else c[2]  
    
                r = haversine(np.array(x0), 
                              np.array(y0), 
                              catalogue.data['longitude'][_i],
                              catalogue.data['latitude'][_i])
                # keep only events within the kernel support distance
                _j = np.logical_and(0 < r, r <= _h)
                r = r[_j]

                # kernel contribution of each event, converted to annual rate
                _k = k.kernel(_m, r) / observation_time

                rate = _k.sum()

                row_dict = {'Longitude': '%.5f' % x0,
                            'Latitude': '%.5f' % y0,
                            'Depth': '%.3f' % z0,
                            'Magnitude': '%.5e' % _m,
                            'Rate': '%.5e' % rate}
                writer.writerow(row_dict)

            fid.close()
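A hedged sketch of invoking this analysis (the smoother instance, catalogue, and config values are assumptions; the keys follow the docstring and the config lookups in the body):

import numpy as np

config = {'Length_Limit': 3.0, 'BandWidth': 50.0, 'increment': True,
          'use3d': False, 'bandwidth_h_limit': 3.0,
          'min_magnitude': None, 'max_magnitude': None}
# [year, magnitude] rows, most recent completeness bin first
completeness_table = np.array([[1990., 4.0],
                               [1960., 5.0]])
# smoother: instance of the (Woo-style) smoothed-seismicity class above
smoother.run_analysis(catalogue, config, completeness_table)

Note that, as written, this implementation streams one CSV of rates per magnitude bin to a hardcoded path, which you would adapt before use.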