Example #1
    def test_read_sites_from_exposure(self):
        """
        Test reading site data from an exposure file using
        :py:func:`openquake.risk.job.general.read_sites_from_exposure`.
        """
        job_config_file = helpers.smoketest_file('simplecase/config.gem')

        test_job = helpers.job_from_file(job_config_file)

        expected_sites = [
            shapes.Site(-118.077721, 33.852034),
            shapes.Site(-118.067592, 33.855398),
            shapes.Site(-118.186739, 33.779013)]

        self.assertEqual(expected_sites,
            general.read_sites_from_exposure(test_job))
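
This excerpt omits the module-level context it runs in. Below is a minimal scaffold under which the method could execute, assuming the imports implied by the snippet and its docstring; the test-case class name is hypothetical and not taken from the original.

# Hypothetical scaffold for the excerpt above (class name is illustrative).
import unittest

from openquake import shapes
from openquake.risk.job import general

from tests.utils import helpers


class RiskJobGeneralTestCase(unittest.TestCase):
    """Hypothetical wrapper class; test_read_sites_from_exposure above
    would be defined as a method of this class."""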
Example #2

import json
import mock
import unittest

from openquake import flags
from openquake import job
from openquake import kvs
from openquake import shapes
from openquake.risk.job import deterministic as risk_job_det

from tests.utils import helpers
from tests.utils.helpers import patch

TEST_JOB_ID = "1234"
TEST_REGION = shapes.Region.from_simple((0.1, 0.1), (0.2, 0.2))
TEST_JOB_FILE = helpers.smoketest_file('deterministic/config.gem')


class DeterministicRiskTestCase(unittest.TestCase):
    """
    Test case for module-level functions of the deterministic risk job code.
    """

    def setUp(self):
        flags.FLAGS.include_defaults = False

    def tearDown(self):
        flags.FLAGS.include_defaults = True

    def test_load_gmvs_for_point(self):
        """
Example #3
# Imports assumed by this excerpt: os, logs and xml are referenced below but
# their import statements were not part of the snippet (module paths follow
# the openquake layout used in the other examples on this page).
import os

from openquake import logs
from openquake import xml
from openquake.hazard import opensha
import openquake.hazard.job

from tests.utils import helpers
from tests.kvs_unittest import ONE_CURVE_MODEL

LOG = logs.LOG

MEAN_GROUND_INTENSITY = (
    '{"site":"+35.0000 +35.0000", "intensity": 1.9249e+00,'
    '"site":"+35.0500 +35.0000", "intensity": 1.9623e+00,'
    '"site":"+35.1000 +35.0000", "intensity": 2.0320e+00,'
    '"site":"+35.1500 +35.0000", "intensity": 2.0594e+00}')

TASK_JOBID_SIMPLE = ["JOB1", "JOB2", "JOB3", "JOB4"]
TEST_JOB_FILE = helpers.smoketest_file('simplecase/config.gem')

TEST_SOURCE_MODEL = ""
with open(
    helpers.smoketest_file('simplecase/expected_source_model.json'), 'r') as f:
    TEST_SOURCE_MODEL = f.read()

TEST_GMPE_MODEL = ""
with open(
    helpers.smoketest_file('simplecase/expected_gmpe_model.json'), 'r') as f:
    TEST_GMPE_MODEL = f.read()

NRML_SCHEMA_PATH = os.path.join(helpers.SCHEMA_DIR, xml.NRML_SCHEMA_FILE)
NRML_SCHEMA_PATH_OLD = \
    os.path.join(helpers.SCHEMA_DIR, xml.NRML_SCHEMA_FILE_OLD)
Example #4

import math
import numpy
import unittest
import json

from tests.utils import helpers
from tests.utils.helpers import patch

from openquake import java
from openquake import kvs
from openquake import flags
from openquake import shapes

from openquake.hazard import deterministic as det

DETERMINISTIC_SMOKE_TEST = helpers.smoketest_file("deterministic/config.gem")
NUMBER_OF_CALC_KEY = "NUMBER_OF_GROUND_MOTION_FIELDS_CALCULATIONS"


def compute_ground_motion_field(self, _random_generator):
    """Stubbed version of the method that computes the ground motion
    field calling java stuff."""

    hashmap = java.jclass("HashMap")()

    for site in self.sites_to_compute():
        location = java.jclass("Location")(site.latitude, site.longitude)
        site = java.jclass("Site")(location)
        hashmap.put(site, 0.5)

    return hashmap
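
The stub above is only useful when it is swapped in for the real, Java-backed implementation while a test runs. The sketch below shows one way that wiring could look, using mock-style patching; the dotted target path and the helper name are assumptions, not taken from this excerpt.

import mock

# The target path is an assumption; point it at wherever
# compute_ground_motion_field is actually defined.
GMF_METHOD = ("openquake.hazard.deterministic."
              "DeterministicEventBasedMixin.compute_ground_motion_field")


def execute_with_stubbed_gmf(engine):
    """Run the given job engine with the Java-backed GMF computation
    replaced by the stub defined above."""
    with mock.patch(GMF_METHOD, new=compute_ground_motion_field):
        engine.execute()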
Example #5
import unittest

from openquake import shapes
from tests.utils import helpers
from openquake import job
from openquake import flags
from openquake.job import Job, EXPOSURE, INPUT_REGION, LOG
from openquake.job.mixins import Mixin
from openquake.risk.job import RiskJobMixin
from openquake.risk.job.probabilistic import ProbabilisticEventMixin
from openquake.risk.job.classical_psha import ClassicalPSHABasedMixin


CONFIG_FILE = "config.gem"
CONFIG_WITH_INCLUDES = "config_with_includes.gem"
HAZARD_ONLY = "hazard-config.gem"

TEST_JOB_FILE = helpers.smoketest_file('simplecase/config.gem')
TEST_JOB_FILE_CLASSICAL = helpers.smoketest_file(
                            'classical_psha_simple/classical-psha-config.gem')

SITE = shapes.Site(1.0, 1.0)
EXPOSURE_TEST_FILE = "exposure-portfolio.xml"
REGION_EXPOSURE_TEST_FILE = "ExposurePortfolioFile-helpers.region"
BLOCK_SPLIT_TEST_FILE = "block_split.gem"
REGION_TEST_FILE = "small.region"

FLAGS = flags.FLAGS


class JobTestCase(unittest.TestCase):
    def setUp(self):
        self.generated_files = []
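
The Mixin imported above is used as a context manager that temporarily grafts job-type behaviour onto a job instance; Example #7 shows the hazard-side form with HazJobMixin. The sketch below is the risk-side equivalent under the same assumptions, reusing the names imported in this snippet; the helper function itself is illustrative and not part of the original test.

def run_risk_phase(config_path=TEST_JOB_FILE):
    """Illustrative only: load a job and run it with RiskJobMixin behaviour
    mixed in, mirroring the hazard-side usage in Example #7."""
    a_job = helpers.job_from_file(config_path)
    with Mixin(a_job, RiskJobMixin):
        a_job.execute()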
Example #7
    def test_generate_hazard_curves_using_classical_psha(self):

        def verify_realization_haz_curves_stored_to_kvs(hazengine):
            """ This just tests to make sure there something in the KVS
            for each key in given list of keys. This does NOT test the
            actual results. """
            # TODO (LB): At some point we need to test the actual
            # results to verify they are correct

            realizations = int(
                hazengine.params['NUMBER_OF_LOGIC_TREE_SAMPLES'])

            for realization in xrange(0, realizations):
                for site in hazengine.sites_to_compute():
                    key = tokens.hazard_curve_poes_key(
                        hazengine.job_id, realization, site)

                    value = self.kvs_client.get(key)
                    # LOG.debug("kvs value is %s" % value)
                    self.assertTrue(value is not None,
                        "no non-empty value found at KVS key")

        def verify_mean_haz_curves_stored_to_kvs(hazengine):
            """ Make sure that the keys and non-empty values for mean
            hazard curves have been written to KVS."""

            if hazengine.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == 'true':

                LOG.debug("verifying KVS entries for mean hazard curves")
                for site in hazengine.sites_to_compute():
                    key = tokens.mean_hazard_curve_key(hazengine.job_id, site)
                    value = self.kvs_client.get(key)
                    self.assertTrue(
                        value is not None, "no value found at KVS key")

        def verify_mean_haz_maps_stored_to_kvs(hazengine):
            """ Make sure that the keys and non-empty values for mean
            hazard maps have been written to KVS."""

            if (hazengine.params[classical_psha.POES_PARAM_NAME] != '' and
                hazengine.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == \
                'true'):

                LOG.debug("verifying KVS entries for mean hazard maps")

                for poe in hazengine.poes_hazard_maps:
                    for site in hazengine.sites_to_compute():
                        key = tokens.mean_hazard_map_key(
                            hazengine.job_id, site, poe)
                        value = self.kvs_client.get(key)
                        self.assertTrue(
                            value is not None, "no value found at KVS key")

        def verify_quantile_haz_curves_stored_to_kvs(hazengine):
            """ Make sure that the keys and non-empty values for quantile
            hazard curves have been written to KVS."""

            quantiles = hazengine.quantile_levels

            LOG.debug("verifying KVS entries for quantile hazard curves, "\
                "%s quantile values" % len(quantiles))

            for quantile in quantiles:
                for site in hazengine.sites_to_compute():
                    key = tokens.quantile_hazard_curve_key(
                        hazengine.job_id, site, quantile)
                    value = self.kvs_client.get(key)
                    self.assertTrue(
                        value is not None, "no value found at KVS key")

        def verify_quantile_haz_maps_stored_to_kvs(hazengine):
            """ Make sure that the keys and non-empty values for quantile
            hazard maps have been written to KVS."""

            quantiles = hazengine.quantile_levels

            if (hazengine.params[classical_psha.POES_PARAM_NAME] != '' and
                len(quantiles) > 0):

                poes = hazengine.poes_hazard_maps

                LOG.debug("verifying KVS entries for quantile hazard maps, "\
                    "%s quantile values, %s PoEs" % (
                    len(quantiles), len(poes)))

                for quantile in quantiles:
                    for poe in poes:
                        for site in hazengine.sites_to_compute():
                            key = tokens.quantile_hazard_map_key(
                                hazengine.job_id, site, poe, quantile)
                            value = self.kvs_client.get(key)
                            self.assertTrue(
                                value is not None,
                                "no value found at KVS key %s" % key)

        def verify_realization_haz_curves_stored_to_nrml(hazengine):
            """Tests that a NRML file has been written for each realization,
            and that this file validates against the NRML schema.
            Does NOT test if results in NRML file are correct.
            """
            realizations = int(
                hazengine.params['NUMBER_OF_LOGIC_TREE_SAMPLES'])
            for realization in xrange(0, realizations):

                nrml_path = os.path.join(
                    "smoketests/classical_psha_simple/computed_output",
                    hazengine.hazard_curve_filename(realization))

                LOG.debug("validating NRML file %s" % nrml_path)

                self.assertTrue(xml.validates_against_xml_schema(
                    nrml_path, NRML_SCHEMA_PATH),
                    "NRML instance file %s does not validate against schema" \
                    % nrml_path)

        def verify_mean_haz_curves_stored_to_nrml(hazengine):
            """Tests that a mean hazard curve NRML file has been written,
            and that this file validates against the NRML schema.
            Does NOT test if results in NRML file are correct.
            """

            if hazengine.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == 'true':
                nrml_path = os.path.join(
                    "smoketests/classical_psha_simple/computed_output",
                    hazengine.mean_hazard_curve_filename())

                LOG.debug("validating NRML file %s" % nrml_path)

                self.assertTrue(xml.validates_against_xml_schema(
                    nrml_path, NRML_SCHEMA_PATH),
                    "NRML instance file %s does not validate against schema" \
                    % nrml_path)

        def verify_mean_haz_maps_stored_to_nrml(hazengine):
            """Tests that a mean hazard map NRML file has been written,
            and that this file validates against the NRML schema.
            Does NOT test if results in NRML file are correct.
            """
            if (hazengine.params[classical_psha.POES_PARAM_NAME] != '' and
                hazengine.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == \
                'true'):

                for poe in hazengine.poes_hazard_maps:
                    nrml_path = os.path.join(
                        "smoketests/classical_psha_simple/computed_output",
                        hazengine.mean_hazard_map_filename(poe))

                    LOG.debug("validating NRML file for mean hazard map %s" \
                        % nrml_path)

                    self.assertTrue(xml.validates_against_xml_schema(
                        nrml_path, NRML_SCHEMA_PATH),
                        "NRML instance file %s does not validate against "\
                        "schema" % nrml_path)

        def verify_quantile_haz_curves_stored_to_nrml(hazengine):
            """Tests that quantile hazard curve NRML files have been written,
            and that these files validate against the NRML schema.
            Does NOT test if results in NRML files are correct.
            """

            for quantile in hazengine.quantile_levels:

                nrml_path = os.path.join(
                    "smoketests/classical_psha_simple/computed_output",
                    hazengine.quantile_hazard_curve_filename(quantile))

                LOG.debug("validating NRML file for quantile hazard curve: "\
                    "%s" % nrml_path)

                self.assertTrue(xml.validates_against_xml_schema(
                    nrml_path, NRML_SCHEMA_PATH),
                    "NRML instance file %s does not validate against schema" \
                    % nrml_path)

        def verify_quantile_haz_maps_stored_to_nrml(hazengine):
            """Tests that quantile hazard map NRML files have been written,
            and that these files validate against the NRML schema.
            Does NOT test if results in NRML files are correct.
            """

            quantiles = hazengine.quantile_levels

            if (hazengine.params[classical_psha.POES_PARAM_NAME] != '' and
                len(quantiles) > 0):

                for poe in hazengine.poes_hazard_maps:
                    for quantile in quantiles:
                        nrml_path = os.path.join(
                            "smoketests/classical_psha_simple/computed_output",
                            hazengine.quantile_hazard_map_filename(quantile,
                                                                   poe))

                        LOG.debug("validating NRML file for quantile hazard "\
                            "map: %s" % nrml_path)

                        self.assertTrue(xml.validates_against_xml_schema(
                            nrml_path, NRML_SCHEMA_PATH),
                            "NRML instance file %s does not validate against "\
                            "schema" % nrml_path)

        test_file_path = helpers.smoketest_file(
            "classical_psha_simple/config.gem")

        hazengine = helpers.job_from_file(test_file_path)

        with mixins.Mixin(hazengine, openquake.hazard.job.HazJobMixin):
            hazengine.execute()

            verify_realization_haz_curves_stored_to_kvs(hazengine)
            verify_realization_haz_curves_stored_to_nrml(hazengine)

            # hazard curves: check results of mean and quantile computation
            verify_mean_haz_curves_stored_to_kvs(hazengine)
            verify_quantile_haz_curves_stored_to_kvs(hazengine)

            verify_mean_haz_curves_stored_to_nrml(hazengine)
            verify_quantile_haz_curves_stored_to_nrml(hazengine)

            # hazard maps: check results of mean and quantile computation
            verify_mean_haz_maps_stored_to_kvs(hazengine)
            verify_quantile_haz_maps_stored_to_kvs(hazengine)

            verify_mean_haz_maps_stored_to_nrml(hazengine)
            verify_quantile_haz_maps_stored_to_nrml(hazengine)
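
The method above is excerpted from a larger test module and leans on names defined at module level there (os, helpers, mixins, tokens, classical_psha, xml, LOG, NRML_SCHEMA_PATH), plus a self.kvs_client created in the test case's setUp, which is not reproduced here. The sketch below reconstructs that module-level context under the layout suggested by the other excerpts on this page; the tokens and classical_psha module paths are assumptions.

# Assumed module-level context for the test above (tokens and classical_psha
# paths are guesses; the rest mirrors the other excerpts on this page).
import os
import unittest

import openquake.hazard.job
from openquake import logs
from openquake import xml
from openquake.job import mixins
from openquake.hazard import classical_psha  # assumed module path
from openquake.kvs import tokens             # assumed module path

from tests.utils import helpers

LOG = logs.LOG
NRML_SCHEMA_PATH = os.path.join(helpers.SCHEMA_DIR, xml.NRML_SCHEMA_FILE)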