Example #1
    def setUp(self):
        self.context = Context(get_data_path(
            'config.yml', DATA_DIR))
        self.eq_catalog_filename = get_data_path(
            'ISC_small_data.csv', DATA_DIR)
        self.smodel_filename = get_data_path(
            'area_source_model.xml', DATA_DIR)
Example #2
    def test_stepp(self):
        self.context.config['eq_catalog_file'] = get_data_path(
            'completeness_input_test.csv', DATA_DIR)

        self.context.config['Stepp']['time_window'] = 5
        self.context.config['Stepp']['magnitude_windows'] = 0.1
        self.context.config['Stepp']['sensitivity'] = 0.2
        self.context.config['Stepp']['increment_lock'] = True

        read_eq_catalog(self.context)
        create_catalog_matrix(self.context)

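        # Expected completeness table: each row is (magnitude, year from which
        # the catalogue is considered complete for that magnitude), as produced
        # by the Stepp analysis.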
        filtered_eq_events = np.array([
                    [4.0, 1994.], [4.1, 1994.], [4.2, 1994.],
                    [4.3, 1994.], [4.4, 1994.], [4.5, 1964.],
                    [4.6, 1964.], [4.7, 1964.], [4.8, 1964.],
                    [4.9, 1964.], [5.0, 1964.], [5.1, 1964.],
                    [5.2, 1964.], [5.3, 1964.], [5.4, 1964.],
                    [5.5, 1919.], [5.6, 1919.], [5.7, 1919.],
                    [5.8, 1919.], [5.9, 1919.], [6.0, 1919.],
                    [6.1, 1919.], [6.2, 1919.], [6.3, 1919.],
                    [6.4, 1919.], [6.5, 1919.], [6.6, 1919.],
                    [6.7, 1919.], [6.8, 1919.], [6.9, 1919.],
                    [7.0, 1919.], [7.1, 1919.], [7.2, 1919.],
                    [7.3, 1919.]])

        stepp(self.context)
        self.assertTrue(np.allclose(filtered_eq_events,
                self.context.completeness_table))

        gardner_knopoff(self.context)
        stepp(self.context)
        self.assertTrue(np.allclose(filtered_eq_events,
                self.context.completeness_table))
Example #3
    def test_gardner_knopoff(self):

        self.context.config['eq_catalog_file'] = get_data_path(
            'declustering_input_test.csv', DATA_DIR)
        self.context.config['GardnerKnopoff']['time_dist_windows'] = \
                'GardnerKnopoff'
        self.context.config['GardnerKnopoff']['foreshock_time_window'] = 0.5

        read_eq_catalog(self.context)
        create_catalog_matrix(self.context)

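        # Expected mainshock catalogue: rows 4, 10 and 19 (the clustered
        # events flagged below) are removed from the original catalogue matrix.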
        expected_vmain_shock = np.delete(
            self.context.catalog_matrix, [4, 10, 19], 0)

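        # Expected cluster index for each event (0 = not assigned to a cluster).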
        expected_vcl = np.array([0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0,
            0, 0, 0, 0, 6])

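        # Expected flags: 1 marks events classified as dependent (non-mainshock)
        # events by the Gardner-Knopoff windowing.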
        expected_flag_vector = np.array([0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0,
            0, 0, 0, 0, 0, 0, 1])

        gardner_knopoff(self.context)

        self.assertTrue(np.array_equal(expected_vmain_shock,
                self.context.catalog_matrix))
        self.assertTrue(np.array_equal(expected_vcl, self.context.vcl))
        self.assertTrue(np.array_equal(expected_flag_vector,
                self.context.flag_vector))
Example #4
    def setUp(self):
        self.correct_filename = get_data_path('ISC_small_data.csv', DATA_DIR)

        self.csv_reader = CsvReader(self.correct_filename)

        self.first_data_row = [
            '1', 'AAA', '20000102034913', '2000', '01', '02', '03', '49', '13',
            '0.02', '7.282', '44.368', '2.43', '1.01', '298', '9.3', '0.5',
            '1.71', '0.355', '   ', '   ', '   ', '   ', '1.7', '0.1'
        ]
Example #5
    def setUp(self):
        self.area_source_nrml = get_data_path('area_source_model.xml',
                                              DATA_DIR)
        self.simple_fault_nrml = get_data_path('simple_fault_source_model.xml',
                                               DATA_DIR)
        self.complex_fault_nrml = get_data_path('complex_source_model.xml',
                                                DATA_DIR)
        self.simple_point_nrml = get_data_path('simple_point_source_model.xml',
                                               DATA_DIR)
        self.incorrect_nrml = get_data_path('incorrect_area_source_model.xml',
                                            DATA_DIR)
        self.schema = get_data_path('nrml.xsd', SCHEMA_DIR)

        self.area_source_reader = NRMLReader(self.area_source_nrml,
                                             self.schema)
        self.simple_fault_reader = NRMLReader(self.simple_fault_nrml,
                                              self.schema)
        self.complex_fault_reader = NRMLReader(self.complex_fault_nrml,
                                               self.schema)
        self.simple_point_reader = NRMLReader(self.simple_point_nrml,
                                              self.schema)

        self.gen_as = self.area_source_reader.read().next()
        self.gen_sf = self.simple_fault_reader.read().next()
        self.gen_cf = self.complex_fault_reader.read().next()
        self.gen_sp = self.simple_point_reader.read().next()
Example #6
    def setUp(self):
        self.area_source_nrml = get_data_path(
            'area_source_model.xml', DATA_DIR)
        self.simple_fault_nrml = get_data_path(
            'simple_fault_source_model.xml', DATA_DIR)
        self.complex_fault_nrml = get_data_path(
            'complex_source_model.xml', DATA_DIR)
        self.simple_point_nrml = get_data_path(
            'simple_point_source_model.xml', DATA_DIR)
        self.incorrect_nrml = get_data_path(
            'incorrect_area_source_model.xml', DATA_DIR)
        self.schema = get_data_path('nrml.xsd', SCHEMA_DIR)

        self.area_source_reader = NRMLReader(self.area_source_nrml,
                self.schema)
        self.simple_fault_reader = NRMLReader(self.simple_fault_nrml,
                self.schema)
        self.complex_fault_reader = NRMLReader(self.complex_fault_nrml,
                self.schema)
        self.simple_point_reader = NRMLReader(self.simple_point_nrml,
                self.schema)

        self.gen_as = self.area_source_reader.read().next()
        self.gen_sf = self.simple_fault_reader.read().next()
        self.gen_cf = self.complex_fault_reader.read().next()
        self.gen_sp = self.simple_point_reader.read().next()
Example #7
    def setUp(self):

        self.first_data_row = [
            1, 'AAA', 20000102034913, 2000, 01, 02, 03, 49, 13, 0.02, 7.282,
            44.368, 2.43, 1.01, 298, 9.3, 0.5, 1.71, 0.355, '', '', '', '',
            1.7, 0.1
        ]

        self.data_row_to_convert = [
            '2', 'AAA', '20000105132157', '2000', '01', '05', '13', '21', '57',
            '0.10', '11.988', '44.318', '0.77', '0.25', '315', '7.9', '0.5',
            '3.89', '0.199', '   ', '   ', '3.8', '0.1', '   ', '   '
        ]

        self.eq_reader = EqEntryReader(
            get_data_path('ISC_small_data.csv', DATA_DIR))
Example #8
    def test_parameters_gardner_knopoff(self):

        self.context.config['eq_catalog_file'] = get_data_path(
            'declustering_input_test.csv', DATA_DIR)
        self.context.config['GardnerKnopoff']['time_dist_windows'] = \
                'GardnerKnopoff'
        self.context.config['GardnerKnopoff']['foreshock_time_window'] = 0.5

        read_eq_catalog(self.context)
        create_catalog_matrix(self.context)

        def mock(data, time_dist_windows, foreshock_time_window):
            self.assertEquals("GardnerKnopoff", time_dist_windows)
            self.assertEquals(0.5, foreshock_time_window)
            return None, None, None

        self.context.map_sc['gardner_knopoff'] = mock
        gardner_knopoff(self.context)
Example #9
    def setUp(self):

        def square_job(context):
            value = context.number
            context.number = value * value

        def double_job(context):
            value = context.number
            context.number = 2 * value

        self.square_job = square_job
        self.double_job = double_job

        self.pipeline_name = 'square pipeline'
        self.pipeline = PipeLine(self.pipeline_name)

        self.context = Context(get_data_path('config.yml', DATA_DIR))
        self.context.number = 2
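For orientation, a minimal sketch of how a fixture like this might be exercised in a test. The add_job/run method names are assumptions about the PipeLine API, not confirmed by the snippet above; the expected value simply follows from applying square_job and then double_job to context.number = 2.

    def test_run_square_then_double(self):
        # Hypothetical test; assumes PipeLine exposes add_job() and run(context).
        self.pipeline.add_job(self.square_job)
        self.pipeline.add_job(self.double_job)
        self.pipeline.run(self.context)
        # 2 squared is 4, doubled is 8.
        self.assertEqual(8, self.context.number)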
Example #10
    def test_parameters_stepp(self):
        self.context.config['eq_catalog_file'] = get_data_path(
            'completeness_input_test.csv', DATA_DIR)

        self.context.config['Stepp']['time_window'] = 5
        self.context.config['Stepp']['magnitude_windows'] = 0.1
        self.context.config['Stepp']['sensitivity'] = 0.2
        self.context.config['Stepp']['increment_lock'] = True

        read_eq_catalog(self.context)
        create_catalog_matrix(self.context)

        def mock(year, mw, magnitude_windows, time_window, sensitivity, iloc):
            self.assertEqual(time_window, 5)
            self.assertEqual(magnitude_windows, 0.1)
            self.assertEqual(sensitivity, 0.2)
            self.assertTrue(iloc)

        self.context.map_sc['stepp'] = mock
        stepp(self.context)
Example #11
# <http://www.gnu.org/licenses/lgpl-3.0.txt> for a copy of the LGPLv3 License.

"""
The purpose of this module is to provide functions
which tackle specific jobs.
"""

import logging
import numpy as np
from shapely.geometry import Polygon, Point

from mtoolkit.eqcatalog import EqEntryReader
from mtoolkit.smodel import NRMLReader
from mtoolkit.utils import get_data_path, SCHEMA_DIR

NRML_SCHEMA_PATH = get_data_path('nrml.xsd', SCHEMA_DIR)


def logged_job(job):
    """
    Decorate a job by adding logging
    statements before and after the execution
    of the job
    """

    def wrapper(context):
        """Wraps a job, adding logging statements"""
        logger = logging.getLogger('mt_logger')
        start_job_line = 'Start:\t%21s \t' % job.__name__
        end_job_line = 'End:\t%21s \t' % job.__name__
        logger.info(start_job_line)
        job(context)
        logger.info(end_job_line)
    return wrapper
Example #12
    def setUp(self):
        self.pipeline_name = 'main workflow'
        self.pipeline_builder = PipeLineBuilder(self.pipeline_name)
        self.context = Context(get_data_path('config.yml', DATA_DIR))
Example #13
    def setUp(self):
        self.context = Context(get_data_path('config.yml', DATA_DIR))
Example #14
# version 3 along with OpenQuake. If not, see
# <http://www.gnu.org/licenses/lgpl-3.0.txt> for a copy of the LGPLv3 License.
"""
The purpose of this module is to provide functions
which tackle specific jobs.
"""

import logging
import numpy as np
from shapely.geometry import Polygon, Point

from mtoolkit.eqcatalog import EqEntryReader
from mtoolkit.smodel import NRMLReader
from mtoolkit.utils import get_data_path, SCHEMA_DIR

NRML_SCHEMA_PATH = get_data_path('nrml.xsd', SCHEMA_DIR)


def logged_job(job):
    """
    Decorate a job by adding logging
    statements before and after the execution
    of the job
    """
    def wrapper(context):
        """Wraps a job, adding logging statements"""
        logger = logging.getLogger('mt_logger')
        start_job_line = 'Start:\t%21s \t' % job.__name__
        end_job_line = 'End:\t%21s \t' % job.__name__
        logger.info(start_job_line)
        job(context)
        logger.info(end_job_line)
    return wrapper
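As a closing illustration, a minimal sketch of how a decorator like logged_job is typically applied. The job name below is hypothetical and the logging setup is an assumption, not part of the original module.

    # Hypothetical job; any callable taking a context object works the same way.
    @logged_job
    def sample_job(context):
        """Pretend to do some work on the context."""
        context.touched = True

    # With logging configured (for example logging.basicConfig(level=logging.INFO),
    # so that the 'mt_logger' messages propagate to a handler), calling
    # sample_job(context) logs a Start line, runs the job body, then logs an End line.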