class EqEntryWriterTestCase(unittest.TestCase):

    def setUp(self):
        self.pprocessing_result_filename = get_data_path('out.csv', DATA_DIR)

        self.first_data_row = {'eventID': 1, 'Agency': 'AAA', 'month': 1,
                                'depthError': 0.5, 'second': 13.0,
                                'SemiMajor90': 2.43, 'year': 2000,
                                'ErrorStrike': 298.0, 'timeError': 0.02,
                                'sigmamb': '', 'latitude': 44.368,
                                'sigmaMw': 0.355, 'sigmaMs': '',
                                'Mw': 1.71, 'Ms': '',
                                'Identifier': 20000102034913, 'day': 2,
                                'minute': 49, 'hour': 3,
                                'mb': '', 'SemiMinor90': 1.01,
                                'longitude': 7.282, 'depth': 9.3,
                                'ML': 1.7, 'sigmaML': 0.1}

        self.second_data_row = {'eventID': 2, 'Agency': 'AAA', 'month': 1,
                                'depthError': 0.5, 'second': 57.0,
                                'SemiMajor90': 0.77, 'year': 2000,
                                'ErrorStrike': 315.0, 'timeError': 0.1,
                                'sigmamb': 0.1, 'latitude': 44.318,
                                'sigmaMw': 0.199, 'sigmaMs': '',
                                'Mw': 3.89, 'Ms': '',
                                'Identifier': 20000105132157, 'day': 5,
                                'minute': 21, 'hour': 13,
                                'mb': 3.8, 'SemiMinor90': 0.25,
                                'longitude': 11.988, 'depth': 7.9,
                                'ML': '', 'sigmaML': ''}

        self.writer = EqEntryWriter(self.pprocessing_result_filename)

        self.expected_csv = get_data_path('expected_entries.csv', DATA_DIR)

    def test_an_incorrect_csv_dirname_raise_exception(self):
        self.assertRaises(IOError, EqEntryWriter, 'invalid/dir/name')

    def test_write_csv_file(self):
        rows = [self.first_data_row, self.second_data_row]
        self.writer.write_rows(rows)

        self.assertTrue(filecmp.cmp(self.expected_csv,
            self.pprocessing_result_filename))
Example #3
def store_preprocessed_catalog(context):
    """
    Write in a csv file the earthquake
    catalog after preprocessing jobs (i.e.
    gardner_knopoff, stepp)
    :param context: shared datastore across different jobs
        in a pipeline
    """

    writer = EqEntryWriter(context.config["pprocessing_result_file"])
    indexes_entries_to_store = np.where(context.selected_eq_vector == 0)[0]
    number_written_eq = len(indexes_entries_to_store)

    entries = []
    for index in indexes_entries_to_store:
        entries.append(context.eq_catalog[index])

    writer.write_rows(entries)

    LOGGER.debug("* Stored Eq entries: %d" % number_written_eq)

    LOGGER.debug(
        "* Number of events removed after preprocessing jobs: %d" %
        (len(context.catalog_matrix) - number_written_eq))
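
A minimal usage sketch for this job, assuming a bare-bones context object that carries only the attributes the function reads; the result file name and the stand-in rows below are placeholders, not values from the original pipeline.

from types import SimpleNamespace

import numpy as np

# Stand-in rows shaped like first_data_row / second_data_row in the tests
# above (placeholders; a real catalog entry carries all of the csv fields).
kept_row = {'eventID': 1, 'Agency': 'AAA'}
removed_row = {'eventID': 2, 'Agency': 'AAA'}

context = SimpleNamespace(
    config={'pprocessing_result_file': 'preprocessed_catalog.csv'},
    # 0 marks an event kept by the preprocessing jobs, anything else is dropped.
    selected_eq_vector=np.array([0, 1]),
    eq_catalog=[kept_row, removed_row],
    catalog_matrix=np.empty((2, 0)),  # only its length is used, for the log message
)

store_preprocessed_catalog(context)  # writes only kept_row to the csv file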
Example #4
def store_preprocessed_catalog(context):
    """
    Write in a csv file the earthquake
    catalog after preprocessing jobs (i.e.
    gardner_knopoff, stepp)
    :param context: shared datastore across different jobs
        in a pipeline
    """

    writer = EqEntryWriter(
        context.config['pprocessing_result_file'])
    indexes_entries_to_store = np.where(context.selected_eq_vector == 0)[0]
    number_written_eq = len(indexes_entries_to_store)

    entries = []
    for index in indexes_entries_to_store:
        entries.append(context.eq_catalog[index])

    writer.write_rows(entries)

    LOGGER.debug("* Stored Eq entries: %d" % number_written_eq)

    LOGGER.debug("* Number of events removed after preprocessing jobs: %d" %
        (len(context.catalog_matrix) - number_written_eq))
Example #5
    def setUp(self):
        self.pprocessing_result_filename = get_data_path('out.csv', DATA_DIR)

        self.first_data_row = {
            'eventID': 1,
            'Agency': 'AAA',
            'month': 1,
            'depthError': 0.5,
            'second': 13.0,
            'SemiMajor90': 2.43,
            'year': 2000,
            'ErrorStrike': 298.0,
            'timeError': 0.02,
            'sigmamb': '',
            'latitude': 44.368,
            'sigmaMw': 0.355,
            'sigmaMs': '',
            'Mw': 1.71,
            'Ms': '',
            'Identifier': 20000102034913,
            'day': 2,
            'minute': 49,
            'hour': 3,
            'mb': '',
            'SemiMinor90': 1.01,
            'longitude': 7.282,
            'depth': 9.3,
            'ML': 1.7,
            'sigmaML': 0.1
        }

        self.second_data_row = {
            'eventID': 2,
            'Agency': 'AAA',
            'month': 1,
            'depthError': 0.5,
            'second': 57.0,
            'SemiMajor90': 0.77,
            'year': 2000,
            'ErrorStrike': 315.0,
            'timeError': 0.1,
            'sigmamb': 0.1,
            'latitude': 44.318,
            'sigmaMw': 0.199,
            'sigmaMs': '',
            'Mw': 3.89,
            'Ms': '',
            'Identifier': 20000105132157,
            'day': 5,
            'minute': 21,
            'hour': 13,
            'mb': 3.8,
            'SemiMinor90': 0.25,
            'longitude': 11.988,
            'depth': 7.9,
            'ML': '',
            'sigmaML': ''
        }

        self.writer = EqEntryWriter(self.pprocessing_result_filename)

        self.expected_csv = get_data_path('expected_entries.csv', DATA_DIR)
Example #6
class EqEntryWriterTestCase(unittest.TestCase):
    def setUp(self):
        self.pprocessing_result_filename = get_data_path('out.csv', DATA_DIR)

        self.first_data_row = {
            'eventID': 1,
            'Agency': 'AAA',
            'month': 1,
            'depthError': 0.5,
            'second': 13.0,
            'SemiMajor90': 2.43,
            'year': 2000,
            'ErrorStrike': 298.0,
            'timeError': 0.02,
            'sigmamb': '',
            'latitude': 44.368,
            'sigmaMw': 0.355,
            'sigmaMs': '',
            'Mw': 1.71,
            'Ms': '',
            'Identifier': 20000102034913,
            'day': 2,
            'minute': 49,
            'hour': 3,
            'mb': '',
            'SemiMinor90': 1.01,
            'longitude': 7.282,
            'depth': 9.3,
            'ML': 1.7,
            'sigmaML': 0.1
        }

        self.second_data_row = {
            'eventID': 2,
            'Agency': 'AAA',
            'month': 1,
            'depthError': 0.5,
            'second': 57.0,
            'SemiMajor90': 0.77,
            'year': 2000,
            'ErrorStrike': 315.0,
            'timeError': 0.1,
            'sigmamb': 0.1,
            'latitude': 44.318,
            'sigmaMw': 0.199,
            'sigmaMs': '',
            'Mw': 3.89,
            'Ms': '',
            'Identifier': 20000105132157,
            'day': 5,
            'minute': 21,
            'hour': 13,
            'mb': 3.8,
            'SemiMinor90': 0.25,
            'longitude': 11.988,
            'depth': 7.9,
            'ML': '',
            'sigmaML': ''
        }

        self.writer = EqEntryWriter(self.pprocessing_result_filename)

        self.expected_csv = get_data_path('expected_entries.csv', DATA_DIR)

    def test_an_incorrect_csv_dirname_raise_exception(self):
        self.assertRaises(IOError, EqEntryWriter, 'invalid/dir/name')

    def test_write_csv_file(self):
        rows = [self.first_data_row, self.second_data_row]
        self.writer.write_rows(rows)

        self.assertTrue(
            filecmp.cmp(self.expected_csv, self.pprocessing_result_filename))
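
For orientation only, a minimal sketch of a writer that would satisfy the tests above is shown here; the field order, csv dialect, and error message are assumptions, not the original EqEntryWriter implementation.

import csv
import os


class EqEntryWriter(object):
    """Sketch of a csv writer for earthquake catalog entries (not the original)."""

    # Assumed field order; the real writer defines its own header layout.
    FIELDNAMES = ['eventID', 'Agency', 'Identifier', 'year', 'month', 'day',
                  'hour', 'minute', 'second', 'timeError', 'longitude',
                  'latitude', 'SemiMajor90', 'SemiMinor90', 'ErrorStrike',
                  'depth', 'depthError', 'Mw', 'sigmaMw', 'Ms', 'sigmaMs',
                  'mb', 'sigmamb', 'ML', 'sigmaML']

    def __init__(self, filename):
        # The tests expect an IOError when the target directory does not exist.
        dirname = os.path.dirname(os.path.abspath(filename))
        if not os.path.isdir(dirname):
            raise IOError('Invalid directory: %s' % dirname)
        self.filename = filename

    def write_rows(self, rows):
        # Each row is a dict keyed by the field names above, as in the setUp data.
        with open(self.filename, 'w', newline='') as csv_file:
            writer = csv.DictWriter(csv_file, fieldnames=self.FIELDNAMES)
            writer.writeheader()
            writer.writerows(rows)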