Example no. 1
    def setUp(self):
        client = kvs.get_client()

        # Delete managed job id info so we can predict the job key
        # which will be allocated for us
        client.delete(kvs.tokens.CURRENT_JOBS)

        self.generated_files = []
        self.job = helpers.job_from_file(helpers.get_data_path(CONFIG_FILE))
        self.job_with_includes = helpers.job_from_file(
            helpers.get_data_path(CONFIG_WITH_INCLUDES))
Example no. 2
    def test_compute_disagg_matrix(self):
        """Test the core function of the main disaggregation task."""

        # for the given test input data, we expect the calculator to return
        # this gmv:
        expected_gmv = 0.2259803374787534

        the_job = helpers.job_from_file(DISAGG_DEMO_CONFIG_FILE)

        helpers.store_hazard_logic_trees(the_job)

        site = shapes.Site(0.0, 0.0)
        poe = 0.1
        result_dir = tempfile.gettempdir()

        gmv, matrix_path = disagg_core.compute_disagg_matrix(
            the_job, site, poe, result_dir)

        # Now test the following:
        # 1) The matrix file exists
        # 2) The matrix file has a size > 0
        # 3) Check that the returned GMV is what we expect
        # Here we don't test the actual matrix contents or the hdf5 file;
        # there are tests on the Java side which verify the actual data in the
        # matrix, plus other tests on the Python side which deal with saving
        # the matrix.
        self.assertTrue(os.path.exists(matrix_path))
        self.assertTrue(os.path.getsize(matrix_path) > 0)
        self.assertEqual(expected_gmv, gmv)

        # For clean up, delete the hdf5 we generated.
        os.unlink(matrix_path)
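A small robustness note on the cleanup above: if any of the assertions fails, the `os.unlink` call is never reached and the generated HDF5 file is left behind in the temp directory. A hedged sketch of an alternative using `unittest`'s `addCleanup` (Python 2.7+), reusing the names from the example:

        gmv, matrix_path = disagg_core.compute_disagg_matrix(
            the_job, site, poe, result_dir)
        # Register cleanup as soon as the file exists, so it also runs
        # when one of the assertions below fails.
        self.addCleanup(os.unlink, matrix_path)

        self.assertTrue(os.path.exists(matrix_path))
        self.assertTrue(os.path.getsize(matrix_path) > 0)
        self.assertEqual(expected_gmv, gmv)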
Example no. 3
    def test_compute_disagg_matrix(self):
        # Test the core function of the main disaggregation task.

        # for the given test input data, we expect the calculator to return
        # this gmv:
        expected_gmv = 0.2259803374787534

        the_job = helpers.job_from_file(DISAGG_DEMO_CONFIG_FILE)

        helpers.store_hazard_logic_trees(the_job)

        site = shapes.Site(0.0, 0.0)
        poe = 0.1
        result_dir = tempfile.gettempdir()

        gmv, matrix_path = disagg_core.compute_disagg_matrix(
            the_job, site, poe, result_dir)

        # Now test the following:
        # 1) The matrix file exists
        # 2) The matrix file has a size > 0
        # 3) Check that the returned GMV is what we expect
        # Here we don't test the actual matrix contents or the hdf5 file;
        # there are tests on the Java side which verify the actual data in
        # the matrix, plus other tests on the Python side which deal with
        # saving the matrix.
        self.assertTrue(os.path.exists(matrix_path))
        self.assertTrue(os.path.getsize(matrix_path) > 0)
        self.assertEqual(expected_gmv, gmv)

        # For clean up, delete the hdf5 we generated.
        os.unlink(matrix_path)
Example no. 4
 def test_job_with_only_hazard_config_only_has_hazard_section(self):
     FLAGS.include_defaults = False
     try:
         job_with_only_hazard = \
             helpers.job_from_file(helpers.get_data_path(HAZARD_ONLY))
         self.assertEqual(["HAZARD"], job_with_only_hazard.sections)
     finally:
         FLAGS.include_defaults = True
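The save/toggle/restore dance around `FLAGS.include_defaults` (and around `flags.FLAGS.debug` in Example no. 13 below) is easy to get wrong without the try/finally. A hedged sketch of a context manager that could factor the pattern out; `patched_flag` is a hypothetical name, not a helper from this codebase:

import contextlib

@contextlib.contextmanager
def patched_flag(flags_obj, name, value):
    """Temporarily set ``flags_obj.<name>``, restoring the old value on exit."""
    saved = getattr(flags_obj, name)
    setattr(flags_obj, name, value)
    try:
        yield
    finally:
        setattr(flags_obj, name, saved)

# Hypothetical usage, equivalent to the try/finally above:
# with patched_flag(FLAGS, 'include_defaults', False):
#     job_with_only_hazard = \
#         helpers.job_from_file(helpers.get_data_path(HAZARD_ONLY))
#     self.assertEqual(["HAZARD"], job_with_only_hazard.sections)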
Example no. 5
    def test_default_validators_scenario_job(self):
        """Test to ensure that a Scenario job always includes the
        :class:`openquake.job.config.ScenarioComputationValidator`."""
        scenario_job_path = helpers.demo_file('scenario_risk/config.gem')
        scenario_job = helpers.job_from_file(scenario_job_path)

        validators = config.default_validators(scenario_job.sections,
                                               scenario_job.params)

        self.assertTrue(any(
            isinstance(v, ScenarioComputationValidator) for v in validators))
Example no. 6
    def test_deterministic_job_completes(self):
        """
        Exercise the deterministic risk job and make sure it runs end-to-end.
        """
        risk_job = helpers.job_from_file(TEST_JOB_FILE)

        # KVS garbage collection is going to be called asynchronously by the
        # job. We don't actually want that to happen.
        with patch('subprocess.Popen'):
            risk_job.launch()
Example no. 7
    def test_default_validators_disagg_job(self):
        """Test to ensure that a Disaggregation job always includes the
        :class:`openquake.job.config.DisaggregationValidator`.
        """
        da_job_path = helpers.demo_file('disaggregation/config.gem')
        da_job = helpers.job_from_file(da_job_path)

        validators = config.default_validators(da_job.sections, da_job.params)

        # test that the default validators include a DisaggregationValidator
        self.assertTrue(
            any(isinstance(v, DisaggregationValidator) for v in validators))
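The `any(isinstance(...))` idiom recurs in every validator test in this collection, and a bare `assertTrue` failure says nothing about which validator was missing. A hedged sketch of a small test-case helper (the name `assertContainsInstance` is mine, not part of the suite):

    def assertContainsInstance(self, seq, cls):
        """Fail unless at least one element of ``seq`` is an instance of ``cls``."""
        if not any(isinstance(item, cls) for item in seq):
            self.fail("no %s instance found in %r" % (cls.__name__, seq))

    # Hypothetical usage in the test above:
    # self.assertContainsInstance(validators, DisaggregationValidator)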
Example no. 8
    def test_default_validators_disagg_job(self):
        """Test to ensure that a Disaggregation job always includes the
        :class:`openquake.job.config.DisaggregationValidator`.
        """
        da_job_path = helpers.demo_file('disaggregation/config.gem')
        da_job = helpers.job_from_file(da_job_path)

        validators = config.default_validators(da_job.sections, da_job.params)

        # test that the default validators include a DisaggregationValidator
        self.assertTrue(any(
            isinstance(v, DisaggregationValidator) for v in validators))
Example no. 9
    def setUp(self):
        self.job_ctxt = helpers.job_from_file(
            os.path.join(helpers.DATA_DIR, 'config.gem'))
        [input] = models.inputs4job(self.job_ctxt.job_id,
                                    input_type="exposure")
        owner = models.OqUser.objects.get(user_name="openquake")
        emdl = input.model()
        if not emdl:
            emdl = models.ExposureModel(
                owner=owner, input=input, description="RCT exposure model",
                category="RCT villas", stco_unit="roofs",
                stco_type="aggregated")
            emdl.save()

        asset_data = [
            ((0, 0), shapes.Site(10.0, 10.0),
             {u'stco': 5.07, u'asset_ref': u'a5625',
              u'taxonomy': u'rctc-ad-83'}),

            ((0, 1), shapes.Site(10.1, 10.0),
             {u'stco': 5.63, u'asset_ref': u'a5629',
              u'taxonomy': u'rctc-ad-83'}),

            ((1, 0), shapes.Site(10.0, 10.1),
             {u'stco': 11.26, u'asset_ref': u'a5630',
              u'taxonomy': u'rctc-ad-83'}),

            ((1, 1), shapes.Site(10.1, 10.1),
             {u'stco': 5.5, u'asset_ref': u'a5636',
              u'taxonomy': u'rctc-ad-83'}),
        ]
        assets = emdl.exposuredata_set.filter(
            taxonomy="rctc-ad-83").order_by("id")
        for idx, (gcoo, site, adata) in enumerate(asset_data):
            if not assets:
                location = geos.GEOSGeometry(site.point.to_wkt())
                asset = models.ExposureData(exposure_model=emdl, site=location,
                                            **adata)
                asset.save()
            else:
                asset = assets[idx]
            GRID_ASSETS[gcoo] = asset

        self.grid = shapes.Grid(shapes.Region.from_coordinates(
            [(10.0, 10.0), (10.0, 10.1), (10.1, 10.1), (10.1, 10.0)]), 0.1)

        # this is the expected output of grid_assets_iterator and an input of
        # asset_losses_per_site
        self.grid_assets = [
            (shapes.GridPoint(self.grid, 0, 0), GRID_ASSETS[(0, 0)]),
            (shapes.GridPoint(self.grid, 1, 0), GRID_ASSETS[(0, 1)]),
            (shapes.GridPoint(self.grid, 0, 1), GRID_ASSETS[(1, 0)]),
            (shapes.GridPoint(self.grid, 1, 1), GRID_ASSETS[(1, 1)])]
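As an aside, the hand-rolled get-or-create of the exposure model above could lean on Django's `get_or_create`, assuming the exposure input maps to at most one model row. A sketch under that assumption:

        emdl, _ = models.ExposureModel.objects.get_or_create(
            input=input, defaults={
                'owner': owner, 'description': "RCT exposure model",
                'category': "RCT villas", 'stco_unit': "roofs",
                'stco_type': "aggregated"})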
Example no. 10
    def test_default_validators_classical_job(self):
        """Test to ensure that a classical always includes the
        :class:`openquake.job.config.ClassicalValidator`.
        """
        classical_risk_job_path = helpers.demo_file(
            'classical_psha_based_risk/config.gem')
        classical_risk_job = helpers.job_from_file(classical_risk_job_path)

        validators = config.default_validators(classical_risk_job.sections,
                                               classical_risk_job.params)

        self.assertTrue(
            any(isinstance(v, ClassicalValidator) for v in validators))
Example no. 11
    def test_default_validators_scenario_job(self):
        """Test to ensure that a Scenario job always includes the
        :class:`openquake.job.config.ScenarioComputationValidator`."""
        scenario_job_path = helpers.demo_file('scenario_risk/config.gem')
        scenario_job = helpers.job_from_file(scenario_job_path)

        validators = config.default_validators(scenario_job.sections,
                                               scenario_job.params)

        self.assertTrue(
            any(
                isinstance(v, ScenarioComputationValidator)
                for v in validators))
Example no. 13
 def test_can_store_and_read_jobs_from_kvs(self):
     flags_debug_default = flags.FLAGS.debug
     flags.FLAGS.debug = "debug"
     try:
         self.job = helpers.job_from_file(
             os.path.join(helpers.DATA_DIR, CONFIG_FILE))
         job_from_kvs = CalculationProxy.from_kvs(self.job.job_id)
         self.assertEqual(flags.FLAGS.debug, job_from_kvs.params.pop("debug"))
         self.assertEqual(self.job, job_from_kvs)
     finally:
         helpers.cleanup_loggers()
         # Restore the default global FLAGS.debug level
         # so we don't break stuff.
         flags.FLAGS.debug = flags_debug_default
Example no. 14
    def setUp(self):
        kvs.flush()

        self.job = helpers.job_from_file(SCENARIO_SMOKE_TEST)

        self.job.params[NUMBER_OF_CALC_KEY] = "1"

        self.job.params["SERIALIZE_RESULTS_TO"] = "xml"

        # saving the default java implementation
        self.default = det.ScenarioEventBasedMixin.compute_ground_motion_field

        self.grid = self.job.region.grid

        self.job.to_kvs()
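Saving the default Java-backed implementation in `self.default` only pays off if a matching tearDown restores it; otherwise a test that monkey-patches `compute_ground_motion_field` leaks its stub into later tests. A hedged sketch of the counterpart, assuming the tests patch the attribute saved above:

    def tearDown(self):
        # Restore the implementation saved in setUp, in case a test
        # replaced it with a stub.
        det.ScenarioEventBasedMixin.compute_ground_motion_field = self.default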
Example no. 15
    def setUp(self):
        self.job = helpers.job_from_file(
            os.path.join(helpers.DATA_DIR, 'config.gem'))

        self.grid = shapes.Grid(shapes.Region.from_coordinates(
            [(1.0, 3.0), (1.0, 4.0), (2.0, 4.0), (2.0, 3.0)]),
            1.0)

        # this is the expected output of grid_assets_iterator and an input of
        # asset_losses_per_site
        self.grid_assets = [
            (shapes.GridPoint(self.grid, 0, 0), GRID_ASSETS[(0, 0)]),
            (shapes.GridPoint(self.grid, 1, 0), GRID_ASSETS[(0, 1)]),
            (shapes.GridPoint(self.grid, 0, 1), GRID_ASSETS[(1, 0)]),
            (shapes.GridPoint(self.grid, 1, 1), GRID_ASSETS[(1, 1)])]
Example no. 16
    def test_read_sites_from_exposure(self):
        """
        Test reading site data from an exposure file using
        :py:func:`openquake.risk.job.general.read_sites_from_exposure`.
        """
        job_config_file = helpers.smoketest_file('simplecase/config.gem')

        test_job = helpers.job_from_file(job_config_file)

        expected_sites = [
            shapes.Site(-118.077721, 33.852034),
            shapes.Site(-118.067592, 33.855398),
            shapes.Site(-118.186739, 33.779013)]

        self.assertEqual(expected_sites,
            general.read_sites_from_exposure(test_job))
Example no. 17
    def test_read_sites_from_exposure(self):
        # Test reading site data from an exposure file using
        # :py:func:`openquake.risk.read_sites_from_exposure`.
        job_cfg = helpers.testdata_path('simplecase/config.gem')

        test_job = helpers.job_from_file(job_cfg)
        calc = core.EventBasedRiskCalculator(test_job)
        calc.store_exposure_assets()

        expected_sites = set([
            shapes.Site(-118.077721, 33.852034),
            shapes.Site(-118.067592, 33.855398),
            shapes.Site(-118.186739, 33.779013)])

        actual_sites = set(read_sites_from_exposure(test_job))

        self.assertEqual(expected_sites, actual_sites)
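Note that, unlike Example no. 16, this variant compares sets rather than lists, so the assertion no longer depends on the order in which `read_sites_from_exposure` yields the sites.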
Example no. 18
    def test_spawn_job_supervisor(self):
        class FakeProcess(object):
            pid = 42

        oq_config.Config().cfg['supervisor']['exe'] = '/supervise me'
        job = helpers.job_from_file(helpers.get_data_path(CONFIG_FILE))

        with patch('subprocess.Popen') as popen:
            popen.return_value = FakeProcess()
            spawn_job_supervisor(job_id=job.job_id, pid=54321)
            self.assertEqual(popen.call_count, 1)
            self.assertEqual(popen.call_args,
                             ((['/supervise me', str(job.job_id), '54321'], ),
                              {'env': os.environ}))
            job = OqJob.objects.get(pk=job.job_id)
            self.assertEqual(job.supervisor_pid, 42)
            self.assertEqual(job.job_pid, 54321)
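For the record, `mock` can express the `call_count`/`call_args` pair above as a single assertion; a hedged equivalent with the same expected arguments:

            popen.assert_called_once_with(
                ['/supervise me', str(job.job_id), '54321'], env=os.environ)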
Example no. 19
    def test_read_sites_from_exposure(self):
        # Test reading site data from an exposure file using
        # :py:func:`openquake.risk.read_sites_from_exposure`.
        job_cfg = helpers.testdata_path('simplecase/config.gem')

        test_job = helpers.job_from_file(job_cfg)
        calc = core.EventBasedRiskCalculator(test_job)
        calc.store_exposure_assets()

        expected_sites = set([
            shapes.Site(-118.077721, 33.852034),
            shapes.Site(-118.067592, 33.855398),
            shapes.Site(-118.186739, 33.779013)])

        actual_sites = set(engine.read_sites_from_exposure(test_job))

        self.assertEqual(expected_sites, actual_sites)
Example no. 20
    def test_hazard_engine_jobber_runs(self):
        """Construction of LogicTreeProcessor in Java should not throw
        errors, and should have params loaded from KVS."""

        hazengine = helpers.job_from_file(TEST_JOB_FILE)
        self.generated_files.append(hazengine.super_config_path)
        with mixins.Mixin(hazengine, openquake.hazard.job.HazJobMixin):
            hazengine.execute()

            source_model_key = tokens.source_model_key(hazengine.job_id)
            self.kvs_client.get(source_model_key)
            # We have the random seed in the config, so this is guaranteed
            # TODO(JMC): Add this back in
            # self.assertEqual(source_model, TEST_SOURCE_MODEL)

            gmpe_key = tokens.gmpe_key(hazengine.job_id)
            self.kvs_client.get(gmpe_key)
Example no. 21
    def test_default_validators_scenario_damage_job(self):
        # Ensures that a Scenario Damage job always includes the
        # :class:`openquake.job.config.ScenarioComputationValidator` and
        # the :class:`openquake.job.config.ScenarioDamageValidator`.

        scenario_job_path = helpers.demo_file(
            "scenario_damage_risk/config.gem")

        scenario_job = helpers.job_from_file(scenario_job_path)

        validators = config.default_validators(
            scenario_job.sections, scenario_job.params)

        self.assertTrue(any(
            isinstance(v, ScenarioComputationValidator) for v in validators))

        self.assertTrue(any(
            isinstance(v, ScenarioDamageValidator) for v in validators))
Example no. 22
    def setUp(self):
        kvs.flush()

        flags.FLAGS.include_defaults = False

        self.job = helpers.job_from_file(DETERMINISTIC_SMOKE_TEST)

        self.job.params[NUMBER_OF_CALC_KEY] = "1"

        self.job.params['SERIALIZE_RESULTS_TO'] = 'xml'

        # saving the default java implementation
        self.default = \
            det.DeterministicEventBasedMixin.compute_ground_motion_field

        self.grid = self.job.region.grid

        self.job.to_kvs()
Example no. 23
 def setUp(self):
     self.running_pid = 1324
     self.stopped_pid = 4312
     OqCalculation.objects.all().update(status='succeeded')
     job_pid = 1
     for status in ('pending', 'running', 'failed', 'succeeded'):
         for supervisor_pid in (self.running_pid, self.stopped_pid):
             job = job_from_file(get_data_path(CONFIG_FILE))
             job = OqCalculation.objects.get(id=job.job_id)
             job.status = status
             job.supervisor_pid = supervisor_pid
             job.job_pid = job_pid
             job_pid += 1
             job.save()
             if status == 'running' and supervisor_pid == self.stopped_pid:
                 self.dead_supervisor_job_id = job.id
                 self.dead_supervisor_job_pid = job.job_pid
     self.is_pid_running = patch('openquake.supervising.is_pid_running')
     self.is_pid_running = self.is_pid_running.start()
     self.is_pid_running.side_effect = lambda pid: pid != self.stopped_pid
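One caveat shared by this setUp and the two variants that follow: `patch(...).start()` stays in effect until the patcher is stopped, so without a tearDown the fake `is_pid_running` leaks into unrelated tests. A hedged sketch of the usual guard, keeping a reference to the patcher before overwriting the name (`_pid_patcher` is a name introduced here, and `addCleanup` needs Python 2.7+):

     self._pid_patcher = patch('openquake.supervising.is_pid_running')
     self.is_pid_running = self._pid_patcher.start()
     self.addCleanup(self._pid_patcher.stop)
     self.is_pid_running.side_effect = lambda pid: pid != self.stopped_pid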
Example no. 24
 def setUp(self):
     self.running_pid = 1324
     self.stopped_pid = 4312
     OqJob.objects.all().update(status='succeeded')
     job_pid = 1
     for status in ('pending', 'running', 'failed', 'succeeded'):
         for supervisor_pid in (self.running_pid, self.stopped_pid):
             job = job_from_file(get_data_path(CONFIG_FILE))
             job = OqJob.objects.get(id=job.job_id)
             job.status = status
             job.supervisor_pid = supervisor_pid
             job.job_pid = job_pid
             job_pid += 1
             job.save()
             if status == 'running' and supervisor_pid == self.stopped_pid:
                 self.dead_supervisor_job_id = job.id
                 self.dead_supervisor_job_pid = job.job_pid
     self.is_pid_running = patch('openquake.supervising.is_pid_running')
     self.is_pid_running = self.is_pid_running.start()
     self.is_pid_running.side_effect = lambda pid: pid != self.stopped_pid
Example no. 25
    def test_default_validators_scenario_damage_job(self):
        # Ensures that a Scenario Damage job always includes the
        # :class:`openquake.job.config.ScenarioComputationValidator` and
        # the :class:`openquake.job.config.ScenarioDamageValidator`.

        scenario_job_path = helpers.demo_file(
            "scenario_damage_risk/config.gem")

        scenario_job = helpers.job_from_file(scenario_job_path)

        validators = config.default_validators(scenario_job.sections,
                                               scenario_job.params)

        self.assertTrue(
            any(
                isinstance(v, ScenarioComputationValidator)
                for v in validators))

        self.assertTrue(
            any(isinstance(v, ScenarioDamageValidator) for v in validators))
Example no. 26
 def setUp(self):
     self.running_pid = 1324
     self.stopped_pid = 4312
     OqJob.objects.all().update(status="succeeded")
     job_pid = 1
     for status in ("pending", "running", "failed", "succeeded"):
         for supervisor_pid in (self.running_pid, self.stopped_pid):
             job = job_from_file(get_data_path(CONFIG_FILE))
             job = OqJob.objects.get(id=job.job_id)
             job.status = status
             job.supervisor_pid = supervisor_pid
             job.job_pid = job_pid
             job_pid += 1
             job.save()
             if status == "running" and supervisor_pid == self.stopped_pid:
                 self.dead_supervisor_job_id = job.id
                 self.dead_supervisor_job_pid = job.job_pid
     self.is_pid_running = patch("openquake.supervising.is_pid_running")
     self.is_pid_running = self.is_pid_running.start()
     self.is_pid_running.side_effect = lambda pid: pid != self.stopped_pid
Example no. 27
    def test_compute_mgm_intensity(self):
        results = []
        block_id = 8801
        site = "Testville,TestLand"

        mgm_intensity = json.JSONDecoder().decode(MEAN_GROUND_INTENSITY)

        job_ids = [helpers.job_from_file(TEST_JOB_FILE).job_id
                   for _ in xrange(4)]
        for job_id in job_ids:
            mgm_key = tokens.mgm_key(job_id, block_id, site)
            self.kvs_client.set(mgm_key, MEAN_GROUND_INTENSITY)

            results.append(tasks.compute_mgm_intensity.apply_async(
                args=[job_id, block_id, site]))

        helpers.wait_for_celery_tasks(results)

        for result in results:
            self.assertEqual(mgm_intensity, result.get())
Example no. 28
    def test_compute_uhs(self):
        # Test the :func:`openquake.hazard.uhs.core.compute_uhs`
        # function. This function makes use of the Java `UHSCalculator` and
        # performs the main UHS computation.

        # The results of the computation are a sequence of Java `UHSResult`
        # objects.
        the_job = helpers.job_from_file(UHS_DEMO_CONFIG_FILE)

        site = Site(0.0, 0.0)

        helpers.store_hazard_logic_trees(the_job)

        uhs_results = compute_uhs(the_job, site)

        for i, result in enumerate(uhs_results):
            poe = result.getPoe()
            uhs = result.getUhs()

            self.assertEqual(self.UHS_RESULTS[i][0], poe)
            self.assertTrue(numpy.allclose(self.UHS_RESULTS[i][1],
                                           [x.value for x in uhs]))
Example no. 30
    def test_compute_mgm_intensity(self):
        results = []
        block_id = 8801
        site = "Testville,TestLand"

        mgm_intensity = json.JSONDecoder().decode(MEAN_GROUND_INTENSITY)

        job_ids = [
            helpers.job_from_file(TEST_JOB_FILE).job_id for _ in xrange(4)
        ]
        for job_id in job_ids:
            mgm_key = tokens.mgm_key(job_id, block_id, site)
            self.kvs_client.set(mgm_key, MEAN_GROUND_INTENSITY)

            results.append(
                classical.compute_mgm_intensity.apply_async(
                    args=[job_id, block_id, site]))

        helpers.wait_for_celery_tasks(results)

        for result in results:
            self.assertEqual(mgm_intensity, result.get())
Example no. 31
    def test_generate_erf_returns_erf_via_kvs(self):
        results = []
        result_keys = []
        expected_values = {}

        job_ids = [helpers.job_from_file(TEST_JOB_FILE).job_id
                   for _ in xrange(4)]
        for job_id in job_ids:
            erf_key = tokens.erf_key(job_id)

            # Build the expected values
            expected_values[erf_key] = json.JSONEncoder().encode([job_id])

            # Get our result keys
            result_keys.append(erf_key)

            # Spawn our tasks.
            results.append(tasks.generate_erf.apply_async(args=[job_id]))

        helpers.wait_for_celery_tasks(results)

        result_values = self.kvs_client.get_multi(result_keys)

        self.assertEqual(result_values, expected_values)
Example no. 32
 def test_can_store_and_read_jobs_from_kvs(self):
     self.job = helpers.job_from_file(
         os.path.join(helpers.DATA_DIR, CONFIG_FILE))
     self.generated_files.append(self.job.super_config_path)
     self.assertEqual(self.job, Job.from_kvs(self.job.job_id))
     helpers.cleanup_loggers()
Example no. 33
 def test_job_with_only_hazard_config_only_has_hazard_section(self):
     job_with_only_hazard = \
         helpers.job_from_file(helpers.get_data_path(HAZARD_ONLY))
     self.assertEqual(["HAZARD"], job_with_only_hazard.sections)
Example no. 34
    def test_generate_hazard_curves_using_classical_psha(self):

        def verify_realization_haz_curves_stored_to_kvs(hazengine):
            """ This just tests to make sure there something in the KVS
            for each key in given list of keys. This does NOT test the
            actual results. """
            # TODO (LB): At some point we need to test the actual
            # results to verify they are correct

            realizations = int(
                hazengine.params['NUMBER_OF_LOGIC_TREE_SAMPLES'])

            for realization in xrange(0, realizations):
                for site in hazengine.sites_to_compute():
                    key = tokens.hazard_curve_poes_key(
                        hazengine.job_id, realization, site)

                    value = self.kvs_client.get(key)
                    # LOG.debug("kvs value is %s" % value)
                    self.assertTrue(value is not None,
                        "no non-empty value found at KVS key")

        def verify_mean_haz_curves_stored_to_kvs(hazengine):
            """ Make sure that the keys and non-empty values for mean
            hazard curves have been written to KVS."""

            if hazengine.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == 'true':

                LOG.debug("verifying KVS entries for mean hazard curves")
                for site in hazengine.sites_to_compute():
                    key = tokens.mean_hazard_curve_key(hazengine.job_id, site)
                    value = self.kvs_client.get(key)
                    self.assertTrue(
                        value is not None, "no value found at KVS key")

        def verify_mean_haz_maps_stored_to_kvs(hazengine):
            """ Make sure that the keys and non-empty values for mean
            hazard maps have been written to KVS."""

            if (hazengine.params[classical_psha.POES_PARAM_NAME] != '' and
                hazengine.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == \
                'true'):

                LOG.debug("verifying KVS entries for mean hazard maps")

                for poe in hazengine.poes_hazard_maps:
                    for site in hazengine.sites_to_compute():
                        key = tokens.mean_hazard_map_key(
                            hazengine.job_id, site, poe)
                        value = self.kvs_client.get(key)
                        self.assertTrue(
                            value is not None, "no value found at KVS key")

        def verify_quantile_haz_curves_stored_to_kvs(hazengine):
            """ Make sure that the keys and non-empty values for quantile
            hazard curves have been written to KVS."""

            quantiles = hazengine.quantile_levels

            LOG.debug("verifying KVS entries for quantile hazard curves, "\
                "%s quantile values" % len(quantiles))

            for quantile in quantiles:
                for site in hazengine.sites_to_compute():
                    key = tokens.quantile_hazard_curve_key(
                        hazengine.job_id, site, quantile)
                    value = self.kvs_client.get(key)
                    self.assertTrue(
                        value is not None, "no value found at KVS key")

        def verify_quantile_haz_maps_stored_to_kvs(hazengine):
            """ Make sure that the keys and non-empty values for quantile
            hazard maps have been written to KVS."""

            quantiles = hazengine.quantile_levels

            if (hazengine.params[classical_psha.POES_PARAM_NAME] != '' and
                len(quantiles) > 0):

                poes = hazengine.poes_hazard_maps

                LOG.debug("verifying KVS entries for quantile hazard maps, "\
                    "%s quantile values, %s PoEs" % (
                    len(quantiles), len(poes)))

                for quantile in quantiles:
                    for poe in poes:
                        for site in hazengine.sites_to_compute():
                            key = tokens.quantile_hazard_map_key(
                                hazengine.job_id, site, poe, quantile)
                            value = self.kvs_client.get(key)
                            self.assertTrue(
                                value is not None,
                                "no value found at KVS key %s" % key)

        def verify_realization_haz_curves_stored_to_nrml(hazengine):
            """Tests that a NRML file has been written for each realization,
            and that this file validates against the NRML schema.
            Does NOT test if results in NRML file are correct.
            """
            realizations = int(
                hazengine.params['NUMBER_OF_LOGIC_TREE_SAMPLES'])
            for realization in xrange(0, realizations):

                nrml_path = os.path.join(
                    "smoketests/classical_psha_simple/computed_output",
                    hazengine.hazard_curve_filename(realization))

                LOG.debug("validating NRML file %s" % nrml_path)

                self.assertTrue(xml.validates_against_xml_schema(
                    nrml_path, NRML_SCHEMA_PATH),
                    "NRML instance file %s does not validate against schema" \
                    % nrml_path)

        def verify_mean_haz_curves_stored_to_nrml(hazengine):
            """Tests that a mean hazard curve NRML file has been written,
            and that this file validates against the NRML schema.
            Does NOT test if results in NRML file are correct.
            """

            if hazengine.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == 'true':
                nrml_path = os.path.join(
                    "smoketests/classical_psha_simple/computed_output",
                    hazengine.mean_hazard_curve_filename())

                LOG.debug("validating NRML file %s" % nrml_path)

                self.assertTrue(xml.validates_against_xml_schema(
                    nrml_path, NRML_SCHEMA_PATH),
                    "NRML instance file %s does not validate against schema" \
                    % nrml_path)

        def verify_mean_haz_maps_stored_to_nrml(hazengine):
            """Tests that a mean hazard map NRML file has been written,
            and that this file validates against the NRML schema.
            Does NOT test if results in NRML file are correct.
            """
            if (hazengine.params[classical_psha.POES_PARAM_NAME] != '' and
                hazengine.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == \
                'true'):

                for poe in hazengine.poes_hazard_maps:
                    nrml_path = os.path.join(
                        "smoketests/classical_psha_simple/computed_output",
                        hazengine.mean_hazard_map_filename(poe))

                    LOG.debug("validating NRML file for mean hazard map %s" \
                        % nrml_path)

                    self.assertTrue(xml.validates_against_xml_schema(
                        nrml_path, NRML_SCHEMA_PATH),
                        "NRML instance file %s does not validate against "\
                        "schema" % nrml_path)

        def verify_quantile_haz_curves_stored_to_nrml(hazengine):
            """Tests that quantile hazard curve NRML files have been written,
            and that these files validate against the NRML schema.
            Does NOT test if results in NRML files are correct.
            """

            for quantile in hazengine.quantile_levels:

                nrml_path = os.path.join(
                    "smoketests/classical_psha_simple/computed_output",
                    hazengine.quantile_hazard_curve_filename(quantile))

                LOG.debug("validating NRML file for quantile hazard curve: "\
                    "%s" % nrml_path)

                self.assertTrue(xml.validates_against_xml_schema(
                    nrml_path, NRML_SCHEMA_PATH),
                    "NRML instance file %s does not validate against schema" \
                    % nrml_path)

        def verify_quantile_haz_maps_stored_to_nrml(hazengine):
            """Tests that quantile hazard map NRML files have been written,
            and that these files validate against the NRML schema.
            Does NOT test if results in NRML files are correct.
            """

            quantiles = hazengine.quantile_levels

            if (hazengine.params[classical_psha.POES_PARAM_NAME] != '' and
                len(quantiles) > 0):

                for poe in hazengine.poes_hazard_maps:
                    for quantile in quantiles:
                        nrml_path = os.path.join(
                            "smoketests/classical_psha_simple/computed_output",
                            hazengine.quantile_hazard_map_filename(quantile,
                                                                   poe))

                        LOG.debug("validating NRML file for quantile hazard "\
                            "map: %s" % nrml_path)

                        self.assertTrue(xml.validates_against_xml_schema(
                            nrml_path, NRML_SCHEMA_PATH),
                            "NRML instance file %s does not validate against "\
                            "schema" % nrml_path)

        test_file_path = helpers.smoketest_file(
            "classical_psha_simple/config.gem")

        hazengine = helpers.job_from_file(test_file_path)

        with mixins.Mixin(hazengine, openquake.hazard.job.HazJobMixin):
            hazengine.execute()

            verify_realization_haz_curves_stored_to_kvs(hazengine)
            verify_realization_haz_curves_stored_to_nrml(hazengine)

            # hazard curves: check results of mean and quantile computation
            verify_mean_haz_curves_stored_to_kvs(hazengine)
            verify_quantile_haz_curves_stored_to_kvs(hazengine)

            verify_mean_haz_curves_stored_to_nrml(hazengine)
            verify_quantile_haz_curves_stored_to_nrml(hazengine)

            # hazard maps: check results of mean and quantile computation
            verify_mean_haz_maps_stored_to_kvs(hazengine)
            verify_quantile_haz_maps_stored_to_kvs(hazengine)

            verify_mean_haz_maps_stored_to_nrml(hazengine)
            verify_quantile_haz_maps_stored_to_nrml(hazengine)
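Every KVS verifier above repeats the same fetch-and-check step. A hedged sketch of a helper that would collapse those three lines and always name the offending key (`assert_kvs_value` is my name for it, not a method of the suite):

    def assert_kvs_value(self, key):
        """Fail unless some value is stored at ``key`` in the KVS."""
        value = self.kvs_client.get(key)
        self.assertTrue(value is not None,
                        "no value found at KVS key %s" % key)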
Example no. 35
 def test_basic_generate_erf_keeps_order(self):
     job_ids = [helpers.job_from_file(TEST_JOB_FILE).job_id
                for _ in xrange(4)]
     results = map(tasks.generate_erf.delay, job_ids)
     self.assertEqual(job_ids, [result.get() for result in results])
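Note: `delay` is celery's shorthand for `apply_async` with positional arguments, so this exercises the same `generate_erf` task as Example no. 31; the Python 2 `map` dispatches all four tasks before any `result.get()` blocks.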
Example no. 36
    def setUp(self):
        self.job_ctxt = helpers.job_from_file(
            os.path.join(helpers.DATA_DIR, 'config.gem'))
        [input] = models.inputs4job(self.job_ctxt.job_id,
                                    input_type="exposure")
        owner = models.OqUser.objects.get(user_name="openquake")
        emdl = input.model()
        if not emdl:
            emdl = models.ExposureModel(owner=owner,
                                        input=input,
                                        description="RCT exposure model",
                                        category="RCT villas",
                                        stco_unit="roofs",
                                        stco_type="aggregated")
            emdl.save()

        asset_data = [
            ((0, 0), shapes.Site(10.0, 10.0), {
                u'stco': 5.07,
                u'asset_ref': u'a5625',
                u'taxonomy': u'rctc-ad-83'
            }),
            ((0, 1), shapes.Site(10.1, 10.0), {
                u'stco': 5.63,
                u'asset_ref': u'a5629',
                u'taxonomy': u'rctc-ad-83'
            }),
            ((1, 0), shapes.Site(10.0, 10.1), {
                u'stco': 11.26,
                u'asset_ref': u'a5630',
                u'taxonomy': u'rctc-ad-83'
            }),
            ((1, 1), shapes.Site(10.1, 10.1), {
                u'stco': 5.5,
                u'asset_ref': u'a5636',
                u'taxonomy': u'rctc-ad-83'
            }),
        ]
        assets = emdl.exposuredata_set.filter(
            taxonomy="rctc-ad-83").order_by("id")
        for idx, (gcoo, site, adata) in enumerate(asset_data):
            if not assets:
                location = geos.GEOSGeometry(site.point.to_wkt())
                asset = models.ExposureData(exposure_model=emdl,
                                            site=location,
                                            **adata)
                asset.save()
            else:
                asset = assets[idx]
            GRID_ASSETS[gcoo] = asset

        self.grid = shapes.Grid(
            shapes.Region.from_coordinates([(10.0, 10.0), (10.0, 10.1),
                                            (10.1, 10.1), (10.1, 10.0)]), 0.1)

        # this is the expected output of grid_assets_iterator and an input of
        # asset_losses_per_site
        self.grid_assets = [
            (shapes.GridPoint(self.grid, 0, 0), GRID_ASSETS[(0, 0)]),
            (shapes.GridPoint(self.grid, 1, 0), GRID_ASSETS[(0, 1)]),
            (shapes.GridPoint(self.grid, 0, 1), GRID_ASSETS[(1, 0)]),
            (shapes.GridPoint(self.grid, 1, 1), GRID_ASSETS[(1, 1)])
        ]