def setUp(self):
    self.cat = CatalogueDatabase(memory=True, drop=True)

    isf_bulletin_filename = "isf_two_events.txt"
    iaspei_filename = "iaspei_from_isf.csv"

    # Load both fixtures, selecting the importer by name.
    self.cat.load_file(in_data_dir(isf_bulletin_filename), "isf_bulletin")
    self.cat.load_file(in_data_dir(iaspei_filename), "iaspei")
Example #2
def load_fixtures(session):
    # Insert test entries into the earthquake catalogue db.
    csv_filename = in_data_dir('query_catalogue.csv')
    with open(csv_filename) as eq_source:
        reader = CsvEqCatalogueReader(eq_source)
        entries = list(reader.read(Converter()))

    event_source = 'query_catalogue'

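    # Build one MagnitudeMeasure per CSV row; the origin attributes are
    # passed straight as keyword arguments via the **origin expansion below.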
    for entry in entries:
        agency = entry['solutionAgency']

        entry_time = datetime(entry['year'], entry['month'], entry['day'],
                              entry['hour'], entry['minute'],
                              int(entry['second']))
        entry_pos = 'POINT(%f %f)' % (entry['Longitude'], entry['Latitude'])
        origin = dict(
            time=entry_time, position=WKTSpatialElement(entry_pos),
            origin_key=entry['eventKey'], depth=entry['depth'])

        mag_measure = models.MagnitudeMeasure(
            agency=agency,
            scale=entry['mag_type'],
            value=entry['magnitude'], standard_error=0.2,
            event_key=entry['eventKey'],
            event_source=event_source, **origin)

        session.add(mag_measure)
Example #3
def test_drop(self):
    # Reset the singleton so a fresh catalogue (file-backed, then in-memory)
    # can be created with drop=True.
    catalogue.CatalogueDatabase.reset_singleton()
    self.catalogue = catalogue.CatalogueDatabase(
        drop=True,
        filename=in_data_dir("test_drop.db"))
    catalogue.CatalogueDatabase.reset_singleton()
    self.catalogue = catalogue.CatalogueDatabase(memory=True, drop=True)
Example #4
def load_fixtures(session):
    # Insert test entries into the earthquake catalogue db.
    csv_filename = in_data_dir('query_catalogue.csv')
    with open(csv_filename) as eq_source:
        reader = CsvEqCatalogueReader(eq_source)
        entries = list(reader.read(Converter()))

    event_source = models.EventSource(name='query_catalogue')
    session.add(event_source)
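    # For each CSV row, reuse the Agency/Event if its source key is already
    # stored, otherwise create it; then attach an Origin, a MagnitudeMeasure
    # and its station-count metadata.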
    for entry in entries:
        inserted_agency = session.query(models.Agency).filter(
            models.Agency.source_key == entry['solutionAgency'])
        if not inserted_agency.count():
            agency = models.Agency(source_key=entry['solutionAgency'],
                                   eventsource=event_source)
            session.add(agency)
        else:
            agency = inserted_agency.all()[0]

        inserted_event = session.query(models.Event).filter_by(
            source_key=entry['eventKey'])
        if not inserted_event.count():
            event = models.Event(source_key=entry['eventKey'],
                                 eventsource=event_source)
            session.add(event)
        else:
            event = inserted_event.all()[0]

        entry_time = datetime(entry['year'], entry['month'], entry['day'],
                                entry['hour'], entry['minute'],
                                int(entry['second']))
        entry_pos = 'POINT(%f %f)' % (entry['Longitude'], entry['Latitude'])
        origin = models.Origin(
            time=entry_time, position=WKTSpatialElement(entry_pos),
            depth=entry['depth'], eventsource=event_source,
            source_key=entry['eventKey'])

        mag_measure = models.MagnitudeMeasure(
            agency=agency, event=event, origin=origin,
            scale=entry['mag_type'], value=entry['magnitude'],
            standard_error=0.2)

        measure_meta = models.MeasureMetadata(
            metadata_type='stations', value=entry['stations'],
            magnitudemeasure=mag_measure)

        session.add(origin)
        session.add(mag_measure)
        session.add(measure_meta)
Example #5
def test_drop(self):
    self.catalogue = catalogue.CatalogueDatabase(memory=True, drop=True)
    self.catalogue = catalogue.CatalogueDatabase(
        drop=True, filename=in_data_dir("test_drop.db"))
Example #6
def _plot_and_assert(self, homo, filename_prefix):
    graph_filename = in_data_dir("qa_homo_%s.png" % filename_prefix)
    homo.plot(graph_filename)
    self.assertTrue(os.path.exists(graph_filename))
Example #7
import unittest
import os
from datetime import datetime

from tests.test_utils import in_data_dir

from eqcatalogue import models, selection
from eqcatalogue.filtering import C
from eqcatalogue.regression import (LinearModel, PolynomialModel)
from eqcatalogue.homogeniser import Homogeniser

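# Open the QA fixture database once for the whole module; CatalogueDatabase
# appears to act as a singleton (note reset_singleton() in Example #3), so
# the tests below can reuse this connection.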
DB = models.CatalogueDatabase(filename=in_data_dir('qa.db'))


class HomogeniserAPI(unittest.TestCase):

    def _plot_and_assert(self, homo, filename_prefix):
        graph_filename = in_data_dir("qa_homo_%s.png" % filename_prefix)
        homo.plot(graph_filename)
        self.assertTrue(os.path.exists(graph_filename))

    def test_different_configs(self):
        homo = Homogeniser("mb", "MS")
        homo.set_criteria(C(agency__in=["ISC", "BJI"]))
        homo.set_selector(selection.Precise)
        homo.add_model(LinearModel)
        self._plot_and_assert(homo, 'first')

        homo.set_criteria(C(before=datetime.now()))
        ranking = {"ML": ["ISC", "IDC"], "mb": ["ISC", "FUNV"]}
        homo.set_selector(selection.AgencyRanking, ranking=ranking)
Example #8
import unittest
import numpy as np
from tests.test_utils import in_data_dir

from matplotlib.testing.compare import compare_images

from eqcatalogue import regression, models
from eqcatalogue.serializers.mpl import plot

ACTUAL1 = in_data_dir('actual1.png')
EXPECTED1 = in_data_dir('expected1.png')
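# ACTUAL1 is the path the rendered plot is written to; EXPECTED1 is
# presumably the reference image used with compare_images.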


class ShouldPlotEMSR(unittest.TestCase):
    def test_plot_emsr(self):
        # Arrange the regression coefficients and sample size
        p2_0 = 0.046
        p2_1 = 0.556
        p2_2 = 0.673
        points = 40

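        # Build synthetic native measures whose target magnitudes follow the
        # quadratic law defined by p2_0, p2_1 and p2_2, plus Gaussian noise.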
        native_values = np.random.uniform(3., 8.5, points)
        native_sigmas = np.random.uniform(0.02, 0.2, points)
        target_values = (p2_0 + p2_1 * native_values +
                         p2_2 * (native_values ** 2.))
        target_values += np.random.normal(0., 1, points)
        target_sigmas = np.random.uniform(0.025, 0.2, points)
        native_measures = [
            models.MagnitudeMeasure(
                agency=None, event=None, origin=None,
                scale='Mtest', value=v, standard_error=s)
            for v, s in zip(native_values, native_sigmas)]
Example #9
def _write_and_check(self):
    self.homogeniser.serialize(ACTUAL_OUTPUT[self.i])
    self.assertTrue(path.exists(ACTUAL_OUTPUT[self.i]))
    self.i = self.i + 1
Example #10
import unittest
from os import path
from eqcatalogue.homogeniser import Homogeniser
from tests.test_utils import in_data_dir, load_catalog
from eqcatalogue.regression import LinearModel, PolynomialModel
from eqcatalogue import selection, grouping, models
from eqcatalogue.filtering import C

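# Output image paths, consumed one per call by _write_and_check() below.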
ACTUAL_OUTPUT = [in_data_dir("actual_homo%d.png" % i)
                 for i in range(1, 14)]


class AnHomogeniserShould(unittest.TestCase):

    def setUp(self):
        # The fixtures cannot be loaded just once for the whole class,
        # because MUSSetDefault is not read-only; reload them before each test.
        load_catalog()
        self.homogeniser = Homogeniser()
        self.homogeniser.set_scales(native="mb", target="MS")
        self.i = 0

    def _write_and_check(self):
        self.homogeniser.serialize(ACTUAL_OUTPUT[self.i])
        self.assertTrue(path.exists(ACTUAL_OUTPUT[self.i]))
        self.i = self.i + 1
Example #11
import unittest

from StringIO import StringIO


from eqcatalogue.importers import (CsvEqCatalogueReader, Converter, Importer,
    Iaspei, V1, isf_bulletin as isf)

from eqcatalogue.importers.reader_utils import (STR_TRANSF, INT_TRANSF,
                                                FLOAT_TRANSF)
from eqcatalogue.exceptions import InvalidMagnitudeSeq, ParsingFailure

from eqcatalogue import models as catalogue

from tests.test_utils import in_data_dir


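# Test fixtures: a small ISC query page, a broken ISF bulletin (presumably
# used to exercise the ParsingFailure path), and an IASPEI CSV export.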
DATAFILE_ISC = in_data_dir('isc-query-small.html')
BROKEN_ISC = in_data_dir('broken_isc.txt')

DATAFILE_IASPEI = in_data_dir('iaspei.csv')


class ShouldImportFromISFBulletinV1(unittest.TestCase):

    def setUp(self):
        self.f = file(DATAFILE_ISC)
        self.broken_isc = file(BROKEN_ISC)
        self.cat = catalogue.CatalogueDatabase(memory=True, drop=True)
        self.cat.recreate()

    def tearDown(self):
        self.f.close()
Example #12
def setUp(self):
    eqcatalogue.CatalogueDatabase(filename=in_data_dir('qa.db'))
    self.maxDiff = None
Example #13
import unittest

from eqcatalogue.importers import (
    CsvEqCatalogueReader, Converter, BaseImporter,
    Iaspei, V1, isf_bulletin as isf)

from eqcatalogue.importers.reader_utils import (STR_TRANSF, INT_TRANSF,
                                                FLOAT_TRANSF)
from eqcatalogue.exceptions import InvalidMagnitudeSeq, ParsingFailure

from eqcatalogue import models as catalogue

from tests.test_utils import in_data_dir


DATAFILE_ISC = in_data_dir('isc-query-small.html')
BROKEN_ISC = in_data_dir('broken_isc.txt')
UK_SCALE_ISC = in_data_dir('isc_with_uk_scale.txt')
DATAFILE_IASPEI = in_data_dir('iaspei.csv')


class ShouldImportFromISFBulletinV1(unittest.TestCase):

    def setUp(self):
        self.f = file(DATAFILE_ISC)
        self.broken_isc = file(BROKEN_ISC)
        self.uk_scale_isc = file(UK_SCALE_ISC)
        self.cat = catalogue.CatalogueDatabase(memory=True, drop=True)

    def tearDown(self):
        self.f.close()
Example #14
def setUp(self):
    CatalogueDatabase(filename=in_data_dir("qa.db"))