Example #1
    def setUp(self):
        """
        Read the sample catalogue
        """
        flnme = 'afteran_test_catalogue.csv'
        filename = os.path.join(self.BASE_DATA_PATH, flnme)
        parser = CsvCatalogueParser(filename)
        self.cat = parser.read_file()
        self.dec = Afteran()
Example #2
class AfteranTestCase(unittest.TestCase):
    """
    Unit tests for the Afteran declustering algorithm class.
    """

    BASE_DATA_PATH = os.path.join(os.path.dirname(__file__), 'data')

    def setUp(self):
        """
        Read the sample catalogue
        """
        flnme = 'afteran_test_catalogue.csv'
        filename = os.path.join(self.BASE_DATA_PATH, flnme)
        parser = CsvCatalogueParser(filename)
        self.cat = parser.read_file()
        self.dec = Afteran()

    def test_dec_afteran(self):
        """
        Testing the Afteran algorithm
        """
        config = {
            'time_distance_window': GardnerKnopoffWindow(),
            'time_window': 60.,
        }
        # Process the sample catalogue with the declusterer built in setUp
        vcl, flagvector = self.dec.decluster(self.cat, config)
        print('vcl:', vcl)
        print('flagvector:', flagvector, self.cat.data['flag'])
        self.assertTrue(np.allclose(flagvector, self.cat.data['flag']))

    def test_find_aftershocks(self):
        '''
        Tests the find aftershocks function
        '''
        # Test when aftershocks are in the array
        year_dec = np.array([0.10, 0.20, 0.5, 0.60, 0.80, 1.2])
        vsel = np.array([3, 4, 5])
        time_window = 0.25
        expected_result = (np.array([False, False, False, True, True, False]),
                           True)
        model_result = self.dec._find_aftershocks(vsel, year_dec, time_window,
                                                  2, 6)
        self.assertTrue(np.all(expected_result[0] == model_result[0]))
        self.assertTrue(model_result[1])

        # Test when no aftershocks are found - reduce window to < 0.1
        expected_result = (
            np.array([False, False, False, False, False, False]), False)
        model_result = self.dec._find_aftershocks(vsel, year_dec, 0.09, 2, 6)
        self.assertTrue(np.all(expected_result[0] == model_result[0]))
        self.assertFalse(model_result[1])


    def test_find_foreshocks(self):
        '''
        Tests the find_foreshocks function
        '''
        # Test when foreshocks are in the array
        year_dec = np.array([0.10, 0.40, 0.5, 0.60, 0.80, 1.2])
        vsel = np.array([0, 1])
        time_window = 0.25
        expected_result = (
            np.array([False, True, False, False, False, False]), True)
        model_result = self.dec._find_foreshocks(vsel, year_dec, time_window,
                                                 2, 6)
        self.assertTrue(np.all(expected_result[0] == model_result[0]))
        self.assertTrue(model_result[1])

        # Test when no foreshocks are found - reduce window to < 0.1
        expected_result = (
            np.array([False, False, False, False, False, False]), False)
        model_result = self.dec._find_foreshocks(vsel, year_dec, 0.09, 2, 6)
        self.assertTrue(np.all(expected_result[0] == model_result[0]))
        self.assertFalse(model_result[1])
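
The test snippets above omit their import header. A minimal sketch of what Example #2 would need at the top of the module is shown below; the exact module paths are an assumption based on the openquake.hmtk package layout (older standalone hmtk releases drop the openquake. prefix).

# Assumed import paths - adjust to the installed hmtk/openquake.hmtk layout
import os
import unittest

import numpy as np

from openquake.hmtk.parsers.catalogue.csv_catalogue_parser import \
    CsvCatalogueParser
from openquake.hmtk.seismicity.declusterer.dec_afteran import Afteran
from openquake.hmtk.seismicity.declusterer.distance_time_windows import \
    GardnerKnopoffWindow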
Example #3
def decluster_catalogue(catalogue, config):

    # TODO: catalogue cache or read/cache

    # Step 1 - set up the declustering algorithm
    if config['decluster_method'] == 'afteran':
        decluster_method = Afteran()
    elif config['decluster_method'] == 'gardner_knopoff':
        decluster_method = GardnerKnopoffType1()
    else:
        print("invalid decluster_method configuration: "
              "use [afteran|gardner_knopoff]")
        return None

    # Step 2 - decluster the catalogue
    print('Running declustering ...')
    cluster_vector, flag_vector = decluster_method.decluster(catalogue, config)
    print('done!')
    print('%s clusters found' % np.max(cluster_vector))
    print('%s non-Poissonian events identified' % np.sum(flag_vector != 0))

    
    if config['plot']:
        # Map configuration
        map_dpi = 90
        add_geology = True
        add_sourcemodel = True
        savefig = False

        map_title = 'Clusters'
        #map_title = 'Brazilian Seismic Zones'
        #map_title = 'ISC-GEM Catalogue'
        #map_title = 'South-American Lithology'

        # Configure the limits of the map and the coastline resolution
        map_config = {'min_lon': -80.0, 'max_lon': -30.0,
                      'min_lat': -37.0, 'max_lat': 14.0,
                      'resolution': 'l'}
        #map_config = {'min_lon': -72.0, 'max_lon': -68.0, 'min_lat': -22.0, 'max_lat': -18.0, 'resolution': 'l'}
        #map_config = {'min_lon': -95.0, 'max_lon': -25.0, 'min_lat': -65.0, 'max_lat': 25.0, 'resolution': 'l'}

        basemap = HMTKBaseMap(map_config, map_title, dpi=map_dpi)
        #basemap.add_catalogue(catalogue, linewidth=0.2, alpha=0.1, overlay=True)
    
        idx = cluster_vector != 0
        x = catalogue.data['longitude'][idx]
        y = catalogue.data['latitude'][idx]
        c = cluster_vector[idx]
        
        basemap.add_colour_scaled_points(x, y, c, 
                                         overlay=True,
                                         shape='s', alpha=0.5, size=100, 
                                         linewidth=0.5, facecolor='none', 
                                         cmap=plt.cm.get_cmap('Paired'),
                                         )
    
        plt.show()

        if config['figname']:
            basemap.savemap(config['figname'])

    
    # Step 3 - purge the catalogue of the flagged (clustered) events
    print('Original catalogue had %s events' % catalogue.get_number_events())
    catalogue.select_catalogue_events(flag_vector == 0)
    print('Purged catalogue now contains %s events' %
          catalogue.get_number_events())

    if config['filename']:
        writer = CsvCatalogueWriter(config['filename'])
        writer.write_file(catalogue)
    
    return catalogue
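
For context, decluster_catalogue() expects a config dict carrying both its own keys ('decluster_method', 'plot', 'figname', 'filename') and the keys consumed by the chosen declusterer. A minimal usage sketch follows; the CSV paths are placeholders and the import paths are assumptions based on the openquake.hmtk layout.

# Assumed imports and placeholder file names - adjust to the local setup
from openquake.hmtk.parsers.catalogue.csv_catalogue_parser import \
    CsvCatalogueParser
from openquake.hmtk.seismicity.declusterer.distance_time_windows import \
    GardnerKnopoffWindow

catalogue = CsvCatalogueParser('catalogue.csv').read_file()

config = {
    'decluster_method': 'afteran',            # or 'gardner_knopoff'
    'time_distance_window': GardnerKnopoffWindow(),
    'time_window': 60.,                       # used by Afteran
    'fs_time_prop': 1.0,                      # used by GardnerKnopoffType1
    'plot': False,                            # skip the map
    'figname': None,
    'filename': 'declustered_catalogue.csv',  # write the purged catalogue
}

declustered = decluster_catalogue(catalogue, config)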
Example #4
# Configure the Gardner-Knopoff declustering (gardner_knopoff is assumed to be
# a GardnerKnopoffType1 instance created in an earlier notebook cell)
declust_config = {
    'time_distance_window': GardnerKnopoffWindow(),
    'fs_time_prop': 1.0
}
print(declust_config)

print('Running declustering ...')
vcl, flag_vector = gardner_knopoff.decluster(catalogue, declust_config)
print('done!')
print('%s clusters found' % np.max(vcl))
print('%s non-Poissonian events identified' % np.sum(flag_vector != 0))

# In[ ]:

# Set up the Afteran algorithm
afteran = Afteran()
declust_config = {
    'time_distance_window': GardnerKnopoffWindow(),
    'time_window': 60.
}
print('Running Afteran ...')
# Run Afteran
vcl2, flag_vector2 = afteran.decluster(catalogue, declust_config)
print('done!')
print('%s clusters found' % np.max(vcl2))
print('%s non-Poissonian events identified' % np.sum(flag_vector2 != 0))

# In[ ]:

# Keep only the events not flagged by the Gardner-Knopoff run
catalogue.select_catalogue_events(flag_vector == 0)
print('Purged catalogue now contains %s events' % catalogue.get_number_events())
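
The fragment above comes from a notebook: catalogue, gardner_knopoff and afteran are created in earlier cells that are not shown. A minimal sketch of that setup follows, with the CSV path as a placeholder and the import paths as assumptions.

# Hypothetical setup cell for the notebook fragment above
import numpy as np

from openquake.hmtk.parsers.catalogue.csv_catalogue_parser import \
    CsvCatalogueParser
from openquake.hmtk.seismicity.declusterer.dec_gardner_knopoff import \
    GardnerKnopoffType1
from openquake.hmtk.seismicity.declusterer.dec_afteran import Afteran
from openquake.hmtk.seismicity.declusterer.distance_time_windows import \
    GardnerKnopoffWindow

# Read the catalogue used by both declustering runs (placeholder path)
catalogue = CsvCatalogueParser('catalogue.csv').read_file()

# Instantiate the two declustering algorithms
gardner_knopoff = GardnerKnopoffType1()
afteran = Afteran()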
Example #5

print('Running declustering ...')
vcl, flag_vector = gardner_knopoff.decluster(catalogue, declust_config)
print('done!')
print('%s clusters found' % np.max(vcl))
print('%s non-Poissonian events identified' % np.sum(flag_vector != 0))


# In[ ]:

# Set up the Afteran algorithm
afteran = Afteran()
declust_config = {'time_distance_window': GardnerKnopoffWindow(),
                  'time_window': 60.}
print('Running Afteran ...')
# Run Afteran
vcl2, flag_vector2 = afteran.decluster(catalogue, declust_config)
print('done!')
print('%s clusters found' % np.max(vcl2))
print('%s non-Poissonian events identified' % np.sum(flag_vector2 != 0))


# In[ ]:

catalogue.select_catalogue_events(flag_vector == 0)
print('Purged catalogue now contains %s events' % catalogue.get_number_events())
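
The notebook fragments stop after purging the catalogue in memory. If the declustered catalogue should also be written out, a minimal sketch reusing the CsvCatalogueWriter pattern from Example #3 follows; the output file name is a placeholder and the import path is an assumption.

# Persist the purged (mainshock-only) catalogue to CSV
from openquake.hmtk.parsers.catalogue.csv_catalogue_parser import \
    CsvCatalogueWriter

writer = CsvCatalogueWriter('declustered_catalogue.csv')
writer.write_file(catalogue)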