Example #1
    def test_calculator_for_task(self):
        """Load up a sample calculation (into the db and cache) and make sure
        we can instantiate the correct calculator for a given calculation id.
        """
        from openquake.calculators.hazard.classical.core import (
            ClassicalHazardCalculator)
        job = engine.prepare_job()
        job_profile, params, sections = engine.import_job_profile(
            demo_file('simple_fault_demo_hazard/config.gem'), job)

        job_ctxt = engine.JobContext(params,
                                     job.id,
                                     oq_job_profile=job_profile,
                                     oq_job=job)
        job_ctxt.to_kvs()

        with patch('openquake.utils.tasks.get_running_job') as grc_mock:

            # Loading of the JobContext is done by
            # `get_running_job`, which is covered by other tests.
            # So, we just want to make sure that it's called here.
            grc_mock.return_value = job_ctxt

            calculator = tasks.calculator_for_task(job.id, 'hazard')

            self.assertTrue(isinstance(calculator, ClassicalHazardCalculator))
            self.assertEqual(1, grc_mock.call_count)
Example #2
    def test_hazard_input_is_the_exposure_site(self):
        # when `COMPUTE_HAZARD_AT_ASSETS_LOCATIONS` is specified,
        # the hazard must be looked up at the same location as the risk
        # (the site passed as input to the function)
        params = {config.COMPUTE_HAZARD_AT_ASSETS: True}
        job_ctxt = engine.JobContext(params, None)

        self.assertEqual(shapes.Site(1.0, 1.0),
                         hazard_input_site(job_ctxt, shapes.Site(1.0, 1.0)))
Example #3
    def setUp(self):
        self.job = engine.prepare_job()
        self.job_profile, params, sections = engine.import_job_profile(
            UHS_DEMO_CONFIG_FILE, self.job)

        self.job_ctxt = engine.JobContext(
            params, self.job.id, sections=sections,
            serialize_results_to=['db'], oq_job_profile=self.job_profile,
            oq_job=self.job)
        self.job_ctxt.to_kvs()
        self.job_id = self.job_ctxt.job_id
Example #4
    def setUp(self):
        self.job = engine.prepare_job()
        self.job_profile, self.params, _sections = (engine.import_job_profile(
            demo_file('simple_fault_demo_hazard/config.gem'), self.job))

        self.params['debug'] = 'warn'

        # Cache the calc proxy data into the kvs:
        job_ctxt = engine.JobContext(self.params,
                                     self.job.id,
                                     oq_job_profile=self.job_profile,
                                     oq_job=self.job)
        job_ctxt.to_kvs()
Example #5
    def test__serialize_xml_filenames(self):
        # Test that the file names of the loss XML artifacts are correct.
        # See https://bugs.launchpad.net/openquake/+bug/894706.
        expected_lrc_file_name = (
            'losscurves-block-#%(job_id)s-block#%(block)s.xml')
        expected_lr_file_name = (
            'losscurves-loss-block-#%(job_id)s-block#%(block)s.xml')

        cfg_file = helpers.demo_file('classical_psha_based_risk/config.gem')

        job = engine.prepare_job()
        job_profile, params, sections = engine.import_job_profile(
            cfg_file, job)

        job_ctxt = engine.JobContext(params,
                                     job.id,
                                     sections=sections,
                                     serialize_results_to=['xml', 'db'],
                                     oq_job_profile=job_profile,
                                     oq_job=job)

        calculator = ClassicalRiskCalculator(job_ctxt)

        with helpers.patch('openquake.writer.FileWriter.serialize'):
            # The 'curves' key in the kwargs just needs to be present;
            # because of the serialize mock in place above, it doesn't need
            # to have a real value.

            # First, we test loss ratio curve output,
            # then we'll do the same test for loss curve output.

            # We expect to get a single file path back.
            [file_path] = calculator._serialize(
                0, **dict(curve_mode='loss_ratio', curves=[]))

            _dir, file_name = os.path.split(file_path)

            self.assertEqual(
                expected_lrc_file_name % dict(job_id=job.id, block=0),
                file_name)

            # The same test again, except for loss curves this time.
            [file_path] = calculator._serialize(
                0, **dict(curve_mode='loss', curves=[]))

            _dir, file_name = os.path.split(file_path)

            self.assertEqual(
                expected_lr_file_name % dict(job_id=job.id, block=0),
                file_name)
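A quick sanity check on the two templates above (illustrative only, not part of
the original test): they are ordinary %-style format strings, so with a
hypothetical job id of 7 and block 0 they expand as follows.

    >>> 'losscurves-block-#%(job_id)s-block#%(block)s.xml' % dict(
    ...     job_id=7, block=0)
    'losscurves-block-#7-block#0.xml'
    >>> 'losscurves-loss-block-#%(job_id)s-block#%(block)s.xml' % dict(
    ...     job_id=7, block=0)
    'losscurves-loss-block-#7-block#0.xml'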
Example #6
    def test_hazard_input_is_the_cell_center(self):
        # when `COMPUTE_HAZARD_AT_ASSETS_LOCATIONS` is not specified,
        # the hazard must be looked up at the center of the cell
        # in which the given site falls
        params = {
            config.INPUT_REGION: "1.0, 1.0, 2.0, 1.0, 2.0, 2.0, 1.0, 2.0",
            config.REGION_GRID_SPACING: 0.5}

        job_ctxt = engine.JobContext(params, None)

        self.assertEqual(shapes.Site(1.0, 1.0),
                         hazard_input_site(job_ctxt, shapes.Site(1.2, 1.2)))

        self.assertEqual(shapes.Site(1.5, 1.5),
                         hazard_input_site(job_ctxt, shapes.Site(1.6, 1.6)))
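
The two assertions above pin down a snapping rule from an arbitrary site to a
node of the computation grid. The sketch below reproduces the expected values
under the assumption of round-to-nearest-node behaviour; it is an illustration
of what the test requires, not the engine's actual `hazard_input_site`
implementation (these two cases alone cannot distinguish nearest-node rounding
from flooring).

    def snap_to_grid(coord, origin=1.0, spacing=0.5):
        # Illustrative only: snap one coordinate to the nearest grid node
        # of a grid starting at `origin` with the given `spacing`.
        return origin + round((coord - origin) / spacing) * spacing

    assert snap_to_grid(1.2) == 1.0  # Site(1.2, 1.2) -> Site(1.0, 1.0)
    assert snap_to_grid(1.6) == 1.5  # Site(1.6, 1.6) -> Site(1.5, 1.5)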
Example #7
    def test_write_output(self):
        # Test that the loss map writers are properly called when
        # write_output is invoked.
        cfg_file = helpers.demo_file('classical_psha_based_risk/config.gem')

        job = engine.prepare_job()
        job_profile, params, sections = engine.import_job_profile(
            cfg_file, job)

        # Set conditional loss poe so that loss maps are created.
        # If this parameter is not specified, no loss maps will be serialized
        # at the end of the job.
        params['CONDITIONAL_LOSS_POE'] = '0.01'
        job_profile.conditional_loss_poe = [0.01]
        job_profile.save()

        job_ctxt = engine.JobContext(params,
                                     job.id,
                                     sections=sections,
                                     serialize_results_to=['xml', 'db'],
                                     oq_job_profile=job_profile,
                                     oq_job=job)

        calculator = ClassicalRiskCalculator(job_ctxt)

        # Mock the composed loss map serializer:
        with helpers.patch('openquake.writer.CompositeWriter'
                           '.serialize') as writer_mock:
            calculator.write_output()

            self.assertEqual(1, writer_mock.call_count)

            # Now test that the composite writer got the correct
            # 'serialize to' instructions. The composite writer should have
            # 1 DB and 1 XML loss map serializer:
            composite_writer = writer_mock.call_args[0][0]
            writers = composite_writer.writers

            self.assertEqual(2, len(writers))
            # We don't assume anything about the order of the writers,
            # and we don't care anyway in this test:
            self.assertTrue(
                any(isinstance(w, LossMapDBWriter) for w in writers))
            self.assertTrue(
                any(
                    isinstance(w, LossMapNonScenarioXMLWriter)
                    for w in writers))
Example #8
    def setUp(self):
        cfg_path = helpers.demo_file(
            'probabilistic_event_based_risk/config.gem')

        job = engine.prepare_job()
        jp, params, sections = engine.import_job_profile(cfg_path, job)

        job_ctxt = engine.JobContext(
            params, 1, sections=sections, base_path='/tmp',
            serialize_results_to=['db', 'xml'], oq_job_profile=jp, oq_job=job)
        job_ctxt.blocks_keys = []

        self.calculator = EventBasedRiskCalculator(job_ctxt)
        self.calculator.store_exposure_assets = lambda: None
        self.calculator.store_fragility_model = lambda: None
        self.calculator.store_vulnerability_model = lambda: None
        self.calculator.partition = lambda: None
Example #9
    def test__launch_job_calls_core_calc_methods(self):
        # The `Calculator` interface defines 4 general methods:
        # - initialize
        # - pre_execute
        # - execute
        # - post_execute
        # When `_launch_job` is called, each of these methods should be
        # called once per job type (hazard, risk).

        # Calculation setup:
        cfg_file = helpers.demo_file('classical_psha_based_risk/config.gem')

        job = engine.prepare_job()
        job_profile, params, sections = engine.import_job_profile(
            cfg_file, job)

        job_ctxt = engine.JobContext(
            params, job.id, sections=sections,
            serialize_results_to=['xml', 'db'],
            oq_job_profile=job_profile, oq_job=job)

        # Mocking setup:
        cls_haz_calc = ('openquake.calculators.hazard.classical.core'
                        '.ClassicalHazardCalculator')
        cls_risk_calc = ('openquake.calculators.risk.classical.core'
                         '.ClassicalRiskCalculator')
        methods = ('initialize', 'pre_execute', 'execute', 'post_execute')
        haz_patchers = [helpers.patch('%s.%s' % (cls_haz_calc, m))
                        for m in methods]
        risk_patchers = [helpers.patch('%s.%s' % (cls_risk_calc, m))
                         for m in methods]

        haz_mocks = [p.start() for p in haz_patchers]
        risk_mocks = [p.start() for p in risk_patchers]

        # Call the function under test:
        engine._launch_job(job_ctxt, sections)

        self.assertTrue(all(x.call_count == 1 for x in haz_mocks))
        self.assertTrue(all(x.call_count == 1 for x in risk_mocks))

        # Tear down the mocks:
        for p in haz_patchers:
            p.stop()
        for p in risk_patchers:
            p.stop()
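
A side note on the start()/stop() pattern above (a common variant, not taken
from the original code base): if one of the assertions fails, the patchers are
never stopped. Registering the stops with unittest's addCleanup keeps the same
behaviour while guaranteeing cleanup:

    haz_mocks, risk_mocks = [], []
    for patchers, mocks in ((haz_patchers, haz_mocks),
                            (risk_patchers, risk_mocks)):
        for p in patchers:
            mocks.append(p.start())
            self.addCleanup(p.stop)  # runs even if an assertion fails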
Example #10
    def test_with_risk_jobs_we_can_trigger_hazard_only_on_exposure_sites(self):
        # When we have hazard and risk jobs, we can ask to trigger
        # the hazard computation only on the sites specified
        # in the exposure file.
        self.params['COMPUTE_HAZARD_AT_ASSETS_LOCATIONS'] = True

        job_ctxt = engine.JobContext(
            self.params, self.job.id, sections=self.sections,
            oq_job_profile=self.jp)

        calc = EventBasedRiskCalculator(job_ctxt)
        calc.store_exposure_assets()

        expected_sites = set([
            shapes.Site(-118.077721, 33.852034),
            shapes.Site(-118.067592, 33.855398),
            shapes.Site(-118.186739, 33.779013)])

        actual_sites = set(job_ctxt.sites_to_compute())
        self.assertEqual(expected_sites, actual_sites)
Example #11
def prepare_job_context(path_to_cfg):
    """Given a path to a config file, prepare and return a
    :class:`openquake.engine.JobContext`. This is convenient because it can be
    immediately passed to a calculator constructor.

    This also creates the necessary job and oq_job_profile records.
    """
    job = engine.prepare_job()

    cfg = demo_file(path_to_cfg)

    job_profile, params, sections = engine.import_job_profile(
        cfg, job, force_inputs=True)

    job_ctxt = engine.JobContext(params,
                                 job.id,
                                 sections=sections,
                                 oq_job_profile=job_profile,
                                 oq_job=job)

    return job_ctxt
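
Typical usage (illustrative; the config path and calculator mirror the demo
files and classes used in the other examples here):

    job_ctxt = prepare_job_context('classical_psha_based_risk/config.gem')
    calculator = ClassicalRiskCalculator(job_ctxt)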
Example #12
    def test_partition(self):
        job_cfg = helpers.demo_file('classical_psha_based_risk/config.gem')
        job_profile, params, sections = engine.import_job_profile(
            job_cfg, self.job, force_inputs=True)
        job_ctxt = engine.JobContext(params,
                                     self.job.id,
                                     sections=sections,
                                     oq_job_profile=job_profile)

        calc = general.BaseRiskCalculator(job_ctxt)
        calc.store_exposure_assets()

        calc.partition()

        expected_blocks_keys = [0]
        self.assertEqual(expected_blocks_keys, job_ctxt.blocks_keys)

        expected_sites = [shapes.Site(-122.0, 38.225)]
        expected_block = general.Block(self.job.id, 0, expected_sites)

        actual_block = general.Block.from_kvs(self.job.id, 0)
        self.assertEqual(expected_block, actual_block)
        self.assertEqual(expected_block.sites, actual_block.sites)