Example #1
    def setUp(self):
        self.db_patch = patch(
            'openquake.engine.utils.monitor._db_cnode_status')
        self.live_patch = (
            patch('openquake.engine.utils.monitor._live_cnode_status'))
        self.db_mock = self.db_patch.start()
        self.live_mock = self.live_patch.start()
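A setUp like this one usually has a matching tearDown that stops the patchers again, otherwise the mocks leak into later tests; the same applies to the other setUp-only snippets below (Examples #12, #14, #24). The actual tearDown is not shown here, so the following is only a minimal sketch, assuming nothing else needs cleaning up:

    def tearDown(self):
        # Stop the patches started in setUp so subsequent tests see the
        # real _db_cnode_status/_live_cnode_status functions again.
        self.live_patch.stop()
        self.db_patch.stop()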
Example #2
    def test_failed_job_lifecycle(self):
        with patch('openquake.job.Job.from_file') as from_file:

            # called in place of Job.launch
            def test_status_running_and_fail():
                self.assertEquals('running', self._job_status())

                raise Exception('OMG!')

            # replaces Job.launch with a mock
            def patch_job_launch(*args, **kwargs):
                self.job = self.job_from_file(*args, **kwargs)
                self.job.launch = mock.Mock(
                    side_effect=test_status_running_and_fail)

                self.assertEquals('pending', self._job_status())

                return self.job

            from_file.side_effect = patch_job_launch

            with patch('openquake.job.spawn_job_supervisor'):
                self.assertRaises(Exception, run_job,
                                  helpers.get_data_path(CONFIG_FILE), 'db')

        self.assertEquals(1, self.job.launch.call_count)
        self.assertEquals('failed', self._job_status())
Example #3
    def test_failed_job_lifecycle(self):
        def test_status_running_and_fail(*args):
            self.assertEquals("running", self._calculation_status())

            raise Exception("OMG!")

        def patch_job_launch(*args, **kwargs):
            self.job = self.job_from_file(*args, **kwargs)

            self.assertEquals("pending", self._calculation_status())

            return self.job

        before_launch = engine._launch_calculation
        try:
            engine._launch_calculation = mock.Mock(
                side_effect=test_status_running_and_fail)

            with patch("openquake.engine._job_from_file") as from_file:
                from_file.side_effect = patch_job_launch

                with patch("os.fork", mocksignature=False) as fork:
                    fork.return_value = 0
                    self.assertRaises(Exception, engine.run_calculation,
                                      self.calc_proxy, self.params,
                                      self.sections)

            self.assertEquals(1, engine._launch_calculation.call_count)
            self.assertEquals("failed", self._calculation_status())
        finally:
            engine._launch_calculation = before_launch
Example #4
    def test_successful_job_lifecycle(self):
        with patch('openquake.job.Job.from_file') as from_file:

            # called in place of Job.launch
            def test_status_running_and_succeed():
                self.assertEquals('running', self._job_status())

                return []

            # replaces Job.launch with a mock
            def patch_job_launch(*args, **kwargs):
                self.job = self.job_from_file(*args, **kwargs)
                self.job.launch = mock.Mock(
                    side_effect=test_status_running_and_succeed)

                self.assertEquals('pending', self._job_status())

                return self.job

            from_file.side_effect = patch_job_launch

            with patch('openquake.job.spawn_job_supervisor'):
                run_job(helpers.get_data_path(CONFIG_FILE), 'db')

        self.assertEquals(1, self.job.launch.call_count)
        self.assertEquals('succeeded', self._job_status())
Example #5
    def setUp(self):
        self.job = engine.prepare_job()
        self.calc = disagg_core.DisaggHazardCalculator(self.job)

        # Mock `disagg_task_arg_gen`
        disagg_path = 'openquake.engine.calculators.hazard.disaggregation'
        self.disagg_tag_patch = helpers.patch(
            '%s.core.DisaggHazardCalculator.disagg_task_arg_gen'
            % disagg_path)
        self.disagg_tag_mock = self.disagg_tag_patch.start()
        # fake disagg task arg generator:
        disagg_tag = iter(xrange(3))
        self.disagg_tag_mock.return_value = disagg_tag

        # Mock `haz_general.queue_next`
        base_path = 'openquake.engine.calculators.base'
        self.queue_next_patch = helpers.patch('%s.queue_next' % base_path)
        self.queue_next_mock = self.queue_next_patch.start()

        # Mock `finalize_hazard_curves`
        general_path = 'openquake.engine.calculators.hazard.general'
        self.finalize_curves_patch = helpers.patch(
            '%s.BaseHazardCalculator.finalize_hazard_curves'
            % general_path)
        self.finalize_curves_mock = self.finalize_curves_patch.start()
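Instead of pairing every patch started in setUp with an explicit stop in tearDown, unittest's addCleanup (available since Python 2.7) can register the stop at the moment the patch starts, so it runs even if a later line of setUp raises. A small self-contained sketch of that pattern, using an illustrative target rather than the OpenQuake code above (unittest.mock here; the standalone mock package used by these tests behaves the same way):

    import os
    import unittest
    from unittest import mock


    class AddCleanupExample(unittest.TestCase):

        def setUp(self):
            patcher = mock.patch('os.getcwd')  # illustrative target only
            self.getcwd_mock = patcher.start()
            # registered immediately, so the patch is undone even if a
            # later patcher.start() in setUp raises
            self.addCleanup(patcher.stop)

        def test_getcwd_is_mocked(self):
            self.getcwd_mock.return_value = '/fake/dir'
            self.assertEqual('/fake/dir', os.getcwd())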
Example #6
    def test_successful_job_lifecycle(self):
        def test_status_running_and_succeed(*args):
            self.assertEqual("running", self._calculation_status())

            return []

        def patch_job_launch(*args, **kwargs):
            self.job = self.job_from_file(*args, **kwargs)

            self.assertEqual("pending", self._calculation_status())

            return self.job

        before_launch = engine._launch_job
        try:
            engine._launch_job = mock.Mock(
                side_effect=test_status_running_and_succeed)

            with patch("openquake.engine._job_from_file") as from_file:
                from_file.side_effect = patch_job_launch

                with patch("os.fork", mocksignature=False) as fork:
                    fork.return_value = 0
                    engine.run_job(self.job, self.params, self.sections)

            self.assertEqual(1, engine._launch_job.call_count)
            self.assertEqual("succeeded", self._calculation_status())
        finally:
            engine._launch_job = before_launch
Example #7
    def test_failed_job_lifecycle(self):

        def test_status_running_and_fail(*args):
            self.assertEqual('running', self._calculation_status())

            raise Exception('OMG!')

        def patch_job_launch(*args, **kwargs):
            self.job = self.job_from_file(*args, **kwargs)

            self.assertEqual('pending', self._calculation_status())

            return self.job

        before_launch = engine._launch_job
        try:
            engine._launch_job = mock.Mock(
                side_effect=test_status_running_and_fail)

            with patch('openquake.engine._job_from_file') as from_file:
                from_file.side_effect = patch_job_launch

                with patch('os.fork', mocksignature=False) as fork:
                    fork.return_value = 0
                    self.assertRaises(Exception, engine.run_job,
                                      self.job, self.params, self.sections)

            self.assertEqual(1, engine._launch_job.call_count)
            self.assertEqual('failed', self._calculation_status())
        finally:
            engine._launch_job = before_launch
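The save/assign/restore dance around engine._launch_job in the try/finally above can also be written with mock.patch.object, which restores the original attribute automatically when the with block exits. The following is only a hedged sketch of that variant, reusing the helpers defined in the test above; it is not the project's code:

        with mock.patch.object(engine, '_launch_job',
                               side_effect=test_status_running_and_fail):
            with patch('openquake.engine._job_from_file') as from_file:
                from_file.side_effect = patch_job_launch

                with patch('os.fork', mocksignature=False) as fork:
                    fork.return_value = 0
                    self.assertRaises(Exception, engine.run_job,
                                      self.job, self.params, self.sections)

            self.assertEqual(1, engine._launch_job.call_count)
            self.assertEqual('failed', self._calculation_status())
        # engine._launch_job is restored here without an explicit finally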
Example #8
    def test_pre_execute(self):
        # Most of the pre-execute functionality is implemented in other
        # methods. For this test, just make sure each method gets called.
        base_path = ('openquake.engine.calculators.hazard.classical.core'
                     '.ClassicalHazardCalculator')
        init_src_patch = helpers.patch(
            '%s.%s' % (base_path, 'initialize_sources'))
        init_sm_patch = helpers.patch(
            '%s.%s' % (base_path, 'initialize_site_model'))
        init_rlz_patch = helpers.patch(
            '%s.%s' % (base_path, 'initialize_realizations'))
        record_stats_patch = helpers.patch(
            '%s.%s' % (base_path, 'record_init_stats'))
        init_pr_data_patch = helpers.patch(
            '%s.%s' % (base_path, 'initialize_pr_data'))
        patches = (init_src_patch, init_sm_patch, init_rlz_patch,
                   record_stats_patch, init_pr_data_patch)

        mocks = [p.start() for p in patches]

        # we don't expect the site collection to be loaded yet:
        self.assertIsNone(self.calc.hc._site_collection)

        self.calc.pre_execute()

        # make sure the site_collection is loaded:
        self.assertIsNotNone(self.calc.hc._site_collection)

        for i, m in enumerate(mocks):
            self.assertEqual(1, m.call_count)
            m.stop()
            patches[i].stop()
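When several methods of one class are patched together like this, mock's patch.multiple (assuming `import mock`, or unittest.mock on Python 3) can start and stop all of them in a single context manager; with mock.DEFAULT each name becomes a MagicMock and the block receives them as a dict. A hedged sketch of the equivalent, not the project's actual test:

        base_path = ('openquake.engine.calculators.hazard.classical.core'
                     '.ClassicalHazardCalculator')
        with mock.patch.multiple(
                base_path,
                initialize_sources=mock.DEFAULT,
                initialize_site_model=mock.DEFAULT,
                initialize_realizations=mock.DEFAULT,
                record_init_stats=mock.DEFAULT,
                initialize_pr_data=mock.DEFAULT) as mocks:
            self.calc.pre_execute()
            # `mocks` maps each patched attribute name to its MagicMock
            for m in mocks.values():
                self.assertEqual(1, m.call_count)
        # all five methods are restored here, no explicit stop() needed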
Example #9
    def test_pre_execute(self):
        base_path = ('openquake.engine.calculators.hazard.disaggregation.core'
                     '.DisaggHazardCalculator')
        init_src_patch = helpers.patch(
            '%s.%s' % (base_path, 'initialize_sources'))
        init_rlz_patch = helpers.patch(
            '%s.%s' % (base_path, 'initialize_realizations'))
        record_stats_patch = helpers.patch(
            '%s.%s' % (base_path, 'record_init_stats'))
        init_pr_data_patch = helpers.patch(
            '%s.%s' % (base_path, 'initialize_pr_data'))
        patches = (init_src_patch, init_rlz_patch,
                   record_stats_patch, init_pr_data_patch)

        mocks = [p.start() for p in patches]

        self.calc.pre_execute()

        # make sure the site_collection is loaded:
        self.assertIsNotNone(self.calc.hc.site_collection)

        for i, m in enumerate(mocks):
            self.assertEqual(1, m.call_count)
            m.stop()
            patches[i].stop()
Example #10
    def test_pre_execute(self):
        base_path = ('openquake.engine.calculators.hazard.disaggregation.core'
                     '.DisaggHazardCalculator')
        init_src_patch = helpers.patch(
            '%s.%s' % (base_path, 'initialize_sources'))
        init_rlz_patch = helpers.patch(
            '%s.%s' % (base_path, 'initialize_realizations'))
        record_stats_patch = helpers.patch(
            '%s.%s' % (base_path, 'record_init_stats'))
        init_pr_data_patch = helpers.patch(
            '%s.%s' % (base_path, 'initialize_pr_data'))
        patches = (init_src_patch, init_rlz_patch,
                   record_stats_patch, init_pr_data_patch)

        mocks = [p.start() for p in patches]

        self.calc.pre_execute()

        # make sure the site_collection is loaded:
        self.assertIsNotNone(self.calc.hc.site_collection)

        for i, m in enumerate(mocks):
            self.assertEqual(1, m.call_count)
            m.stop()
            patches[i].stop()
Example #11
    def test_parameterize_sites_with_site_model(self):
        job_ctxt = helpers.prepare_job_context(
            helpers.demo_file(
                'simple_fault_demo_hazard/config_with_site_model.gem'))

        calc = classical.ClassicalHazardCalculator(job_ctxt)
        calc.initialize()

        # This tests to ensure that the `initialize` implementation for this
        # calculator properly stores the site model in the DB.

        # NOTE: If this test ever breaks, it's probably because the
        # ClassicalHazardCalculator is no longer calling the `initialize` code
        # in its super class (BaseHazardCalculator).
        site_model = hazard_general.get_site_model(job_ctxt.oq_job.id)
        self.assertIsNotNone(site_model)

        set_params_patch = helpers.patch(
            'openquake.calculators.hazard.general.set_java_site_parameters')
        closest_data_patch = helpers.patch(
            'openquake.calculators.hazard.general.get_closest_site_model_data')
        sp_mock = set_params_patch.start()
        cd_mock = closest_data_patch.start()

        try:
            calc.parameterize_sites(job_ctxt.sites_to_compute())

            exp_call_count = len(job_ctxt.sites_to_compute())
            self.assertEqual(exp_call_count, sp_mock.call_count)
            self.assertEqual(exp_call_count, cd_mock.call_count)

        finally:
            # tear down the patches
            set_params_patch.stop()
            closest_data_patch.stop()
Example #12
    def setUp(self):
        self.db_patch = patch(
            'openquake.engine.utils.monitor._db_cnode_status')
        self.live_patch = (
            patch('openquake.engine.utils.monitor._live_cnode_status'))
        self.db_mock = self.db_patch.start()
        self.live_mock = self.live_patch.start()
Example #13
    def setUp(self):
        self.job = engine.prepare_job()
        self.calc = disagg_core.DisaggHazardCalculator(self.job)

        # Mock `disagg_task_arg_gen`
        disagg_path = 'openquake.engine.calculators.hazard.disaggregation'
        self.disagg_tag_patch = helpers.patch(
            '%s.core.DisaggHazardCalculator.disagg_task_arg_gen'
            % disagg_path)
        self.disagg_tag_mock = self.disagg_tag_patch.start()
        # fake disagg task arg generator:
        disagg_tag = iter(xrange(3))
        self.disagg_tag_mock.return_value = disagg_tag

        # Mock `haz_general.queue_next`
        base_path = 'openquake.engine.calculators.base'
        self.queue_next_patch = helpers.patch('%s.queue_next' % base_path)
        self.queue_next_mock = self.queue_next_patch.start()

        # Mock `finalize_hazard_curves`
        general_path = 'openquake.engine.calculators.hazard.general'
        self.finalize_curves_patch = helpers.patch(
            '%s.BaseHazardCalculator.finalize_hazard_curves'
            % general_path)
        self.finalize_curves_mock = self.finalize_curves_patch.start()
Example #14
    def setUp(self):
        self.monitor_patch = patch(
            "openquake.engine.utils.monitor.count_failed_nodes")
        self.stats_patch = patch(
            "openquake.engine.utils.stats.get_progress_timing_data")
        self.monitor_mock = self.monitor_patch.start()
        self.stats_mock = self.stats_patch.start()
Example #15
    def test_job_launch_calls_record_initial_stats(self):
        '''When a job is launched, make sure that
        :py:method:`openquake.engine.JobContext._record_initial_stats`
        is called.
        '''
        # Mock out pieces of the test job so it doesn't actually run.
        eb_haz_calc = ('openquake.calculators.hazard.event_based.core'
                       '.EventBasedHazardCalculator')
        eb_risk_calc = ('openquake.calculators.risk.event_based.core'
                       '.EventBasedRiskCalculator')
        methods = ('initialize', 'pre_execute', 'execute', 'post_execute')

        haz_patchers = [patch('%s.%s' % (eb_haz_calc, m)) for m in methods]
        risk_patchers = [patch('%s.%s' % (eb_risk_calc, m)) for m in methods]

        for p in haz_patchers:
            p.start()
        for p in risk_patchers:
            p.start()

        try:
            record = 'openquake.engine.JobContext._record_initial_stats'

            with patch(record) as record_mock:
                engine._launch_job(
                    self.eb_job, ['general', 'HAZARD', 'RISK'])

                self.assertEqual(1, record_mock.call_count)
        finally:
            for p in haz_patchers:
                p.stop()
            for p in risk_patchers:
                p.stop()
Example #16
    def test_compute_uhs_task_pi(self):
        # Test that progress indicators are working properly for
        # `compute_uhs_task`.

        # Mock out the two 'heavy' functions called by this task;
        # we don't need to do these and we don't want to waste the cycles.
        cmpt_uhs = '%s.%s' % (self.UHS_CORE_MODULE, 'compute_uhs')
        write_uhs_data = '%s.%s' % (self.UHS_CORE_MODULE,
                                    'write_uhs_spectrum_data')
        with helpers.patch(cmpt_uhs):
            with helpers.patch(write_uhs_data):

                get_counter = lambda: stats.get_counter(
                    self.job_id, 'h', 'compute_uhs_task', 'i')

                # First, check that the counter for `compute_uhs_task` is
                # `None`:
                self.assertIsNone(get_counter())

                realization = 0
                site = Site(0.0, 0.0)
                # execute the task as a plain old function
                compute_uhs_task(self.job_id, realization, site)
                self.assertEqual(1, get_counter())

                compute_uhs_task(self.job_id, realization, site)
                self.assertEqual(2, get_counter())
Example #17
    def test_celery_task(self):
        # Test that the celery task, when called, properly calls the
        # specific method to write loss curves

        base_path = 'openquake.engine.calculators.risk.writers'
        patches = [
            helpers.patch('%s.loss_curve' % base_path),
            helpers.patch('%s.event_loss_curve' % base_path)
        ]

        try:
            mocked_loss_writer, mocked_event_loss_writer = [
                p.start() for p in patches
            ]

            event_based.event_based(*self.calculator.task_arg_gen(
                self.calculator.block_size()).next())

            # we expect 1 asset to be filtered out by the region
            # constraint, so only four loss curves (2 of them insured)
            # are written
            self.assertEqual(0, mocked_loss_writer.call_count)
            self.assertEqual(2, mocked_event_loss_writer.call_count)
        finally:
            [p.stop() for p in patches]
Example #18
    def test_pre_execute(self):
        # Most of the pre-execute functionality is implemented in other
        # methods. For this test, just make sure each method gets called.
        base_path = ('openquake.engine.calculators.hazard.classical.core'
                     '.ClassicalHazardCalculator')
        init_src_patch = helpers.patch('%s.%s' %
                                       (base_path, 'initialize_sources'))
        init_rlz_patch = helpers.patch('%s.%s' %
                                       (base_path, 'initialize_realizations'))
        record_stats_patch = helpers.patch('%s.%s' %
                                           (base_path, 'record_init_stats'))
        init_pr_data_patch = helpers.patch('%s.%s' %
                                           (base_path, 'initialize_pr_data'))
        patches = (init_src_patch, init_rlz_patch, record_stats_patch,
                   init_pr_data_patch)

        mocks = [p.start() for p in patches]

        self.calc.pre_execute()

        # make sure the site_collection is loaded:
        self.assertIsNotNone(self.calc.hc.site_collection)

        for i, m in enumerate(mocks):
            self.assertEqual(1, m.call_count)
            m.stop()
            patches[i].stop()
Example #19
    def test_job_launch_calls_record_initial_stats(self):
        '''When a job is launched, make sure that
        :py:method:`openquake.engine.JobContext._record_initial_stats`
        is called.
        '''
        # Mock out pieces of the test job so it doesn't actually run.
        eb_haz_calc = ('openquake.calculators.hazard.event_based.core'
                       '.EventBasedHazardCalculator')
        eb_risk_calc = ('openquake.calculators.risk.event_based.core'
                       '.EventBasedRiskCalculator')
        methods = ('initialize', 'pre_execute', 'execute', 'post_execute')

        haz_patchers = [patch('%s.%s' % (eb_haz_calc, m)) for m in methods]
        risk_patchers = [patch('%s.%s' % (eb_risk_calc, m)) for m in methods]

        for p in haz_patchers:
            p.start()
        for p in risk_patchers:
            p.start()

        try:
            record = 'openquake.engine.JobContext._record_initial_stats'

            with patch(record) as record_mock:
                engine._launch_job(
                    self.eb_job, ['general', 'HAZARD', 'RISK'])

                self.assertEqual(1, record_mock.call_count)
        finally:
            for p in haz_patchers:
                p.stop()
            for p in risk_patchers:
                p.stop()
Example #20
    def test_cleanup_after_job(self):
        with patch('openquake.engine.kvs.cache_gc') as cache_gc:
            with patch('openquake.engine.supervising.supervisor.'
                       '_get_task_ids') as gti:
                with patch('celery.task.control.revoke') as revoke:
                    gti.return_value = ['task-id-1', 'task-id-2']

                    supervisor.cleanup_after_job(self.job.id, terminate=True)

                    self.assertEqual(1, cache_gc.call_count)
                    self.assertEqual(((self.job.id, ), {}), cache_gc.call_args)

                    self.assertEqual(1, gti.call_count)
                    self.assertEqual(((self.job.id, ), {}), gti.call_args)

                    self.assertEqual(2, revoke.call_count)
                    exp_revoke_args = [(('task-id-1',), {'terminate': True}),
                                       (('task-id-2',), {'terminate': True})]
                    self.assertEqual(exp_revoke_args, revoke.call_args_list)

                with patch('celery.task.control.revoke') as revoke:
                    gti.return_value = ['task-id-1', 'task-id-2']

                    supervisor.cleanup_after_job(self.job.id, terminate=False)

                    self.assertEqual(2, cache_gc.call_count)
                    self.assertEqual(((self.job.id, ), {}), cache_gc.call_args)

                    self.assertEqual(2, gti.call_count)
                    self.assertEqual(((self.job.id, ), {}), gti.call_args)

                    self.assertEqual(2, revoke.call_count)
                    exp_revoke_args = [(('task-id-1',), {'terminate': False}),
                                       (('task-id-2',), {'terminate': False})]
                    self.assertEqual(exp_revoke_args, revoke.call_args_list)
Example #21
    def test_failed_job_lifecycle(self):

        def test_status_running_and_fail(*args):
            self.assertEqual('running', self._calculation_status())

            raise Exception('OMG!')

        def patch_job_launch(*args, **kwargs):
            self.job = self.job_from_file(*args, **kwargs)

            self.assertEqual('pending', self._calculation_status())

            return self.job

        before_launch = engine._launch_job
        try:
            engine._launch_job = mock.Mock(
                side_effect=test_status_running_and_fail)

            with patch('openquake.engine._job_from_file') as from_file:
                from_file.side_effect = patch_job_launch

                with patch('os.fork', mocksignature=False) as fork:
                    fork.return_value = 0
                    self.assertRaises(Exception, engine.run_job,
                                      self.job, self.params, self.sections)

            self.assertEqual(1, engine._launch_job.call_count)
            self.assertEqual('failed', self._calculation_status())
        finally:
            engine._launch_job = before_launch
Example #22
    def test_pre_execute(self):
        # Most of the pre-execute functionality is implemented in other
        # methods. For this test, just make sure each method gets called.
        path = ('openquake.engine.calculators.risk.general.BaseRiskCalculator')
        patches = (
            helpers.patch(
                '%s.%s' % (path, '_store_exposure')),
            helpers.patch(
                '%s.%s' % (path, 'set_risk_models')),
            helpers.patch(
                '%s.%s' % (path, '_initialize_progress')))

        mocks = [p.start() for p in patches]

        mocks[0].return_value = mock.Mock()
        mocks[0].return_value.taxonomies_in.return_value = {'RC': 10}

        self.calculator.imt = 'PGA'
        self.calculator.pre_execute()

        for i, m in enumerate(mocks):
            self.assertEqual(1, m.call_count,
                             "mock %d has not been called" % (i + 1))
            m.stop()
            patches[i].stop()
Example #23
    def test_compute_uhs_with_site_model(self):
        the_job = helpers.prepare_job_context(
            helpers.demo_file('uhs/config_with_site_model.gem'))
        the_job.to_kvs()

        site = Site(0, 0)

        helpers.store_hazard_logic_trees(the_job)

        get_sm_patch = helpers.patch(
            'openquake.calculators.hazard.general.get_site_model')
        get_closest_patch = helpers.patch(
            'openquake.calculators.hazard.general.get_closest_site_model_data')
        compute_patch = helpers.patch(
            'openquake.calculators.hazard.uhs.core._compute_uhs')

        get_sm_mock = get_sm_patch.start()
        get_closest_mock = get_closest_patch.start()
        compute_mock = compute_patch.start()

        get_closest_mock.return_value = SiteModel(
            vs30=800, vs30_type='measured', z1pt0=100, z2pt5=200)
        try:
            compute_uhs(the_job, site)

            self.assertEqual(1, get_sm_mock.call_count)
            self.assertEqual(1, get_closest_mock.call_count)
            self.assertEqual(1, compute_mock.call_count)
        finally:
            get_sm_patch.stop()
            get_closest_patch.stop()
            compute_patch.stop()
Example #24
    def setUp(self):
        self.monitor_patch = patch(
            "openquake.utils.monitor.count_failed_nodes")
        self.stats_patch = patch(
            "openquake.utils.stats.get_progress_timing_data")
        self.monitor_mock = self.monitor_patch.start()
        self.stats_mock = self.stats_patch.start()
Example #25
    def test_compute_uhs_task_pi(self):
        # Test that progress indicators are working properly for
        # `compute_uhs_task`.

        # Mock out the two 'heavy' functions called by this task;
        # we don't need to do these and we don't want to waste the cycles.
        cmpt_uhs = '%s.%s' % (self.UHS_CORE_MODULE, 'compute_uhs')
        write_uhs_data = '%s.%s' % (self.UHS_CORE_MODULE,
                                    'write_uhs_spectrum_data')
        with helpers.patch(cmpt_uhs):
            with helpers.patch(write_uhs_data):

                get_counter = lambda: stats.get_counter(
                    self.job_id, 'h', 'compute_uhs_task', 'i')

                # First, check that the counter for `compute_uhs_task` is
                # `None`:
                self.assertIsNone(get_counter())

                realization = 0
                site = Site(0.0, 0.0)
                # execute the task as a plain old function
                compute_uhs_task(self.job_id, realization, site)
                self.assertEqual(1, get_counter())

                compute_uhs_task(self.job_id, realization, site)
                self.assertEqual(2, get_counter())
Example #26
    def test_job_launch_calls_record_initial_stats(self):
        """When a job is launched, make sure that
        :py:method:`openquake.engine.JobContext._record_initial_stats`
        is called.
        """
        # Mock out pieces of the test job so it doesn't actually run.
        eb_haz_calc = ("openquake.calculators.hazard.event_based.core"
                       ".EventBasedHazardCalculator")
        eb_risk_calc = ("openquake.calculators.risk.event_based.core"
                        ".EventBasedRiskCalculator")
        methods = ("initialize", "pre_execute", "execute", "post_execute")

        haz_patchers = [patch("%s.%s" % (eb_haz_calc, m)) for m in methods]
        risk_patchers = [patch("%s.%s" % (eb_risk_calc, m)) for m in methods]

        for p in haz_patchers:
            p.start()
        for p in risk_patchers:
            p.start()

        try:
            record = "openquake.engine.JobContext._record_initial_stats"

            with patch(record) as record_mock:
                engine._launch_job(self.eb_job, ["general", "HAZARD", "RISK"])

                self.assertEqual(1, record_mock.call_count)
        finally:
            for p in haz_patchers:
                p.stop()
            for p in risk_patchers:
                p.stop()
Example #27
    def test_loss_map_not_serialized_unless_conditional_loss_poes(self):
        with patch('openquake.risk.job.probabilistic'
                   '.aggregate_loss_curve.plot_aggregate_curve'):
            with patch('openquake.output.risk.create_loss_map_writer') as clw:
                clw.return_value = None

                self.mixin.execute()
                self.assertFalse(clw.called)
Example #28
    def test_loss_map_serialized_if_conditional_loss_poes(self):
        self.mixin.params['CONDITIONAL_LOSS_POE'] = '0.01 0.02'

        with patch('openquake.risk.job.probabilistic'
                   '.aggregate_loss_curve.plot_aggregate_curve'):
            with patch('openquake.output.risk.create_loss_map_writer') as clw:
                clw.return_value = None

                self.mixin.execute()
                self.assertTrue(clw.called)
Example #29
    def test_loss_map_not_serialized_unless_conditional_loss_poes(self):
        with helpers.patch('openquake.calculators.risk.event_based.core'
                           '.plot_aggregate_curve'):
            with helpers.patch(
                'openquake.output.risk.create_loss_map_writer') as clw:

                clw.return_value = None

                self.calculator.execute()
                self.assertFalse(clw.called)
Example #30
    def test_jvm_memmax_setting_is_not_passed(self):
        """Do not pass -Xmx to the jvm."""
        with helpers.patch("jpype.startJVM") as startjvm_mock:
            with helpers.patch("jpype.isJVMStarted") as isjvmstarted_mock:
                # Make sure that startJVM() gets called.
                isjvmstarted_mock.side_effect = lambda: False
                with helpers.patch("openquake.java.init_logs"):
                    java.jvm()
                    args, _ = startjvm_mock.call_args
                    self.assertFalse(
                        filter(lambda a: a.startswith("-Xmx"), args))
Example #31
    def test_loss_map_serialized_if_conditional_loss_poes(self):
        self.calculator.calc_proxy.params['CONDITIONAL_LOSS_POE'] = (
            '0.01 0.02')

        with helpers.patch('openquake.calculators.risk.event_based.core'
                           '.plot_aggregate_curve'):
            with helpers.patch(
                'openquake.output.risk.create_loss_map_writer') as clw:

                clw.return_value = None

                self.calculator.execute()
                self.assertTrue(clw.called)
Example #32
    def test_init_session_updates_internal_dict(self):
        """
        _init_session() will add newly created sessions to the internal
        `__sessions__` dictionary.
        """
        session = object()
        sc = SessionCache()
        with patch('sqlalchemy.create_engine') as ce_mock:
            with patch('sqlalchemy.orm.sessionmaker') as sm_mock:
                sm_mock.return_value = lambda: session
                self.assertTrue(sc.__sessions__.get("usr8") is None)
                sc._init_session("usr8", "t0ps3cr3t")
                self.assertEqual(session, sc.__sessions__.get("usr8"))
Example #33
    def test(self):
        # check that if risk models are provided, then the ``points to
        # compute`` and the IMLs are taken from there

        username = helpers.default_user().user_name

        job = engine.prepare_job(username)

        cfg = helpers.get_data_path('classical_job-sd-imt.ini')
        params, files = engine.parse_config(open(cfg, 'r'))

        haz_calc = engine.create_hazard_calculation(
            job.owner.user_name, params, files)
        haz_calc = models.HazardCalculation.objects.get(id=haz_calc.id)
        job.hazard_calculation = haz_calc
        job.is_running = True
        job.save()

        base_path = ('openquake.engine.calculators.hazard.classical.core'
                     '.ClassicalHazardCalculator')
        init_src_patch = helpers.patch(
            '%s.%s' % (base_path, 'initialize_sources'))
        init_sm_patch = helpers.patch(
            '%s.%s' % (base_path, 'initialize_site_model'))
        init_rlz_patch = helpers.patch(
            '%s.%s' % (base_path, 'initialize_realizations'))
        record_stats_patch = helpers.patch(
            '%s.%s' % (base_path, 'record_init_stats'))
        init_pr_data_patch = helpers.patch(
            '%s.%s' % (base_path, 'initialize_pr_data'))
        patches = (init_src_patch, init_sm_patch, init_rlz_patch,
                   record_stats_patch, init_pr_data_patch)

        mocks = [p.start() for p in patches]

        get_calculator_class(
            'hazard',
            job.hazard_calculation.calculation_mode)(job).pre_execute()

        self.assertEqual([(1.0, -1.0), (0.0, 0.0)],
                         [(point.latitude, point.longitude)
                          for point in haz_calc.points_to_compute()])
        self.assertEqual(['PGA'], haz_calc.get_imts())

        self.assertEqual(3, haz_calc.exposure_model.exposuredata_set.count())

        for i, m in enumerate(mocks):
            m.stop()
            patches[i].stop()

        return job
Example #34
    def test(self):
        # check that if risk models are provided, then the ``points to
        # compute`` and the IMLs are taken from there

        username = helpers.default_user()

        job = engine.prepare_job(username)

        cfg = helpers.get_data_path('classical_job-sd-imt.ini')
        params = engine.parse_config(open(cfg, 'r'))

        haz_calc = engine.create_calculation(models.HazardCalculation, params)
        haz_calc = models.HazardCalculation.objects.get(id=haz_calc.id)
        job.hazard_calculation = haz_calc
        job.is_running = True
        job.save()

        base_path = ('openquake.engine.calculators.hazard.classical.core'
                     '.ClassicalHazardCalculator')
        init_src_patch = helpers.patch('%s.%s' %
                                       (base_path, 'initialize_sources'))
        init_sm_patch = helpers.patch('%s.%s' %
                                      (base_path, 'initialize_site_model'))
        init_rlz_patch = helpers.patch('%s.%s' %
                                       (base_path, 'initialize_realizations'))
        record_stats_patch = helpers.patch('%s.%s' %
                                           (base_path, 'record_init_stats'))
        init_pr_data_patch = helpers.patch('%s.%s' %
                                           (base_path, 'initialize_pr_data'))
        patches = (init_src_patch, init_sm_patch, init_rlz_patch,
                   record_stats_patch, init_pr_data_patch)

        mocks = [p.start() for p in patches]

        get_calculator_class(
            'hazard',
            job.hazard_calculation.calculation_mode)(job).pre_execute()

        self.assertEqual([(1.0, -1.0), (0.0, 0.0)],
                         [(point.latitude, point.longitude)
                          for point in haz_calc.points_to_compute()])
        self.assertEqual(['PGA'], haz_calc.get_imts())

        self.assertEqual(3,
                         haz_calc.oqjob.exposuremodel.exposuredata_set.count())

        for i, m in enumerate(mocks):
            m.stop()
            patches[i].stop()

        return job
Example #35
    def test_imt_validation(self):
        # Test the validation of the imt associated with the
        # vulnerability model that must match the one of the hazard
        # output.
        base_path = ('openquake.engine.calculators.risk.general.'
                     'BaseRiskCalculator.')
        patches = [helpers.patch(base_path + 'set_risk_models'),
                   helpers.patch(base_path + '_store_exposure')]
        for patch in patches:
            patch.start()
        self.calculator.imt = 'Hope'
        self.assertRaises(RuntimeError, self.calculator.pre_execute)
        for patch in patches:
            patch.stop()
Example #36
    def test_jvm_memmax_setting_is_enforced(self):
        """The `-Xmx` property is passed to the JVM."""
        with helpers.patch("jpype.startJVM") as startjvm_mock:
            with helpers.patch("jpype.isJVMStarted") as isjvmstarted_mock:
                # Make sure that startJVM() gets called.

                def side_effect():
                    isjvmstarted_mock.side_effect = lambda: True
                    return False

                isjvmstarted_mock.side_effect = side_effect
                java.jvm()
                args, _ = startjvm_mock.call_args
                self.assertTrue(
                    filter(lambda a: a.startswith("-Xmx"), args))
Example #37
    def test_asset_losses_per_site(self):
        mm = mock.MagicMock(spec=redis.Redis)
        mm.get.return_value = 0.123
        with helpers.patch('openquake.kvs.get_client') as mgc:
            mgc.return_value = mm

            def coords(item):
                return item[0].coords

            expected = [
                (shapes.Site(10.0, 10.0),
                 [({'value': 0.123}, GRID_ASSETS[(0, 0)])]),
                (shapes.Site(10.1, 10.0),
                 [({'value': 0.123}, GRID_ASSETS[(0, 1)])]),
                (shapes.Site(10.0, 10.1),
                 [({'value': 0.123}, GRID_ASSETS[(1, 0)])]),
                (shapes.Site(10.1, 10.1),
                 [({'value': 0.123}, GRID_ASSETS[(1, 1)])]),
            ]

            calculator = general.BaseRiskCalculator(self.job_ctxt)
            actual = calculator.asset_losses_per_site(0.5, self.grid_assets)
            expected = sorted(expected, key=coords)
            actual = sorted(actual, key=coords)

            self.assertEqual(expected, actual)
Example #38
    def test_compute_uhs_task_pi_failure_counter(self):
        # Same as the previous test, except that we want to make sure task
        # failure counters are properly incremented if a task fails.

        cmpt_uhs = '%s.%s' % (self.UHS_CORE_MODULE, 'compute_uhs')
        with helpers.patch(cmpt_uhs) as compute_mock:

            # We want to force a failure to occur in the task:
            compute_mock.side_effect = RuntimeError('Mock exception')

            get_counter = lambda: stats.get_counter(
                self.job_id, 'h', 'compute_uhs_task-failures', 'i')

            # The counter should start out empty:
            self.assertIsNone(get_counter())

            # tasks_args: job_id, realization, site
            task_args = (self.job_id, 0, Site(0.0, 0.0))
            self.assertRaises(RuntimeError, compute_uhs_task, *task_args)
            self.assertEqual(1, get_counter())

            # Create two more failures:
            self.assertRaises(RuntimeError, compute_uhs_task, *task_args)
            self.assertRaises(RuntimeError, compute_uhs_task, *task_args)
            self.assertEqual(3, get_counter())
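As Example #38 shows, side_effect can be a ready-made exception instance. It can equally be an exception class, or an iterable that yields one return value (or raises one exception) per call, which is handy when only the first invocation should fail. A tiny hedged sketch with plain unittest.mock (the standalone mock package used by these tests behaves the same):

    from unittest import mock

    m = mock.Mock(side_effect=[RuntimeError('boom'), 42])
    try:
        m()  # first call raises the RuntimeError taken from the iterable
    except RuntimeError:
        pass
    assert m() == 42  # second call returns the next value, 42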
Example #39
    def test_get_with_unknown_key(self):
        """config.get() returns `None` if the `key` is not known."""
        with patch('openquake.engine.utils.config.get_section') as mock:
            mock.return_value = dict(b=1)
            self.assertTrue(config.get("arghh", "c") is None)
            self.assertEqual(1, mock.call_count)
            self.assertEqual([("arghh", ), {}], mock.call_args)
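The call_count/call_args pair used in these config tests can usually be collapsed into a single assert_called_once_with, which mock has provided for a long time. A hedged sketch of the same check:

        with patch('openquake.engine.utils.config.get_section') as mock_gs:
            mock_gs.return_value = dict(b=1)
            self.assertTrue(config.get("arghh", "c") is None)
            # exactly one call, with exactly this positional argument
            mock_gs.assert_called_once_with("arghh")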
Example #40
    def test_get_with_empty_section_data(self):
        """config.get() returns `None` if the section data dict is empty."""
        with patch('openquake.engine.utils.config.get_section') as mock:
            mock.return_value = dict()
            self.assertTrue(config.get("whatever", "key") is None)
            self.assertEqual(1, mock.call_count)
            self.assertEqual([("whatever", ), {}], mock.call_args)
Example #41
    def test_the_hazard_subsystem_stores_gmfs_for_all_the_sites(self):
        """The hazard subsystem stores the computed gmfs in kvs.

        For each site in the region, a ground motion value is stored
        in the underlying kvs system.
        """

        det.DeterministicEventBasedMixin.compute_ground_motion_field = \
            compute_ground_motion_field

        # KVS garbage collection is going to be called asynchronously by the
        # job. We don't actually want that to happen in this test.
        with patch('subprocess.Popen'):

            self.job.launch()
            decoder = json.JSONDecoder()

            for site in self.job.sites_to_compute():
                point = self.grid.point_at(site)
                key = kvs.tokens.ground_motion_values_key(
                    self.job.job_id, point)

                # just one calculation is triggered in this test case
                print "key is %s" % key
                self.assertEqual(1, self.kvs_client.llen(key))
                gmv = decoder.decode(self.kvs_client.lpop(key))
                self.assertEqual(0, self.kvs_client.llen(key))

                self.assertTrue(
                    numpy.allclose(site.latitude, gmv["site_lat"]))

                self.assertTrue(
                    numpy.allclose(site.longitude, gmv["site_lon"]))
Example #42
    def test_actions_after_a_critical_message(self):
        # the job process is running
        self.is_pid_running.return_value = True

        with patch('openquake.supervising.' \
                   'supervisor.SupervisorLogMessageConsumer.run') as run:

            def run_(mc):
                record = logging.LogRecord('oq.job.123', logging.CRITICAL,
                                           'path', 42, 'a msg', (), None)
                mc.log_callback(record)
                assert mc._stopped

            # the supervisor will receive a msg
            run.side_effect = run_

            supervisor.supervise(1, 123, timeout=0.1)

            # the job process is terminated
            self.assertEqual(1, self.terminate_job.call_count)
            self.assertEqual(((1,), {}), self.terminate_job.call_args)

            # stop time is recorded
            self.assertEqual(1, self.record_job_stop_time.call_count)
            self.assertEqual(((123,), {}), self.record_job_stop_time.call_args)

            # the cleanup is triggered
            self.assertEqual(1, self.cleanup_after_job.call_count)
            self.assertEqual(((123,), {}), self.cleanup_after_job.call_args)

            # the status in the job record is updated
            self.assertEqual(1,
                             self.update_job_status_and_error_msg.call_count)
            self.assertEqual(((123, 'failed', 'a msg'), {}),
                             self.update_job_status_and_error_msg.call_args)
Example #43
    def test_pk_get_with_non_existent_debug_key(self):
        """`KeyError` is raised for debug keys that are not in `STATS_KEYS`."""
        job_id = 96
        pkey = "Not a key!?"
        with helpers.patch(
                "openquake.utils.stats.debug_stats_enabled") as dse:
            dse.return_value = False
            self.assertRaises(KeyError, stats.pk_get, job_id, pkey)
Example #44
    @classmethod
    def setUpClass(cls):
        class OQJP(object):
            no_progress_timeout = 3601

        cls.db_patch = helpers.patch("openquake.db.models.profile4job")
        cls.db_mock = cls.db_patch.start()
        cls.db_mock.return_value = OQJP()
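A class-level patch like the one above is normally undone in a matching tearDownClass, which is not shown here; a minimal sketch, assuming nothing else needs to be cleaned up:

    @classmethod
    def tearDownClass(cls):
        # stop the profile4job patch started in setUpClass so later test
        # cases get the real openquake.db.models.profile4job back
        cls.db_patch.stop()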
Example #45
    def test__serialize_gmf(self):
        # GMFs are serialized as expected.
        location1 = java.jclass("Location")(1.0, 2.0)
        location2 = java.jclass("Location")(1.1, 2.1)
        site1 = java.jclass("Site")(location1)
        site2 = java.jclass("Site")(location2)
        hashmap = java.jclass("HashMap")()
        hashmap.put(site1, 0.1)
        hashmap.put(site2, 0.2)

        self.job_ctxt.params[NUMBER_OF_CALC_KEY] = "2"
        self.job_ctxt.params["SAVE_GMFS"] = "true"
        self.job_ctxt.params["REGION_VERTEX"] = ("0.0, 0.0, 0.0, 3.0, "
                                                 "3.0, 3.0, 3.0, 0.0")
        self.job_profile.region = GEOSGeometry(
            shapes.polygon_ewkt_from_coords(
                '0.0, 0.0, 0.0, 3.0, 3.0, 3.0, 3.0, 0.0'))
        self.job_profile.gmf_calculation_number = 2
        self.job_profile.save()

        calculator = scenario.ScenarioHazardCalculator(self.job_ctxt)

        with patch('openquake.calculators.hazard.scenario.core'
                   '.ScenarioHazardCalculator'
                   '.compute_ground_motion_field') as compute_gmf_mock:
            # the return value needs to be a Java HashMap
            compute_gmf_mock.return_value = hashmap
            calculator.execute()

        patht = os.path.join(self.job_ctxt.base_path,
                             self.job_ctxt['OUTPUT_DIR'], "gmf-%s.xml")
        for cnum in range(self.job_profile.gmf_calculation_number):
            path = patht % cnum
            self.assertTrue(os.path.isfile(path),
                            "GMF file not found (%s)" % path)
Example #46
    def test_get_with_unknown_key(self):
        """config.get() returns `None` if the `key` is not known."""
        with patch('openquake.utils.config.get_section') as mock:
            mock.return_value = dict(b=1)
            self.assertTrue(config.get("arghh", "c") is None)
            self.assertEqual(1, mock.call_count)
            self.assertEqual([("arghh",), {}], mock.call_args)
Example #47
    def test_calculator_for_task(self):
        """Load up a sample calculation (into the db and cache) and make sure
        we can instantiate the correct calculator for a given calculation id.
        """
        from openquake.calculators.hazard.classical.core import (
            ClassicalHazardCalculator)
        job = engine.prepare_job()
        job_profile, params, sections = engine.import_job_profile(
            demo_file('simple_fault_demo_hazard/config.gem'), job)

        job_ctxt = engine.JobContext(params,
                                     job.id,
                                     oq_job_profile=job_profile,
                                     oq_job=job)
        job_ctxt.to_kvs()

        with patch('openquake.utils.tasks.get_running_job') as grc_mock:

            # Loading of the JobContext is done by
            # `get_running_job`, which is covered by other tests.
            # So, we just want to make sure that it's called here.
            grc_mock.return_value = job_ctxt

            calculator = tasks.calculator_for_task(job.id, 'hazard')

            self.assertTrue(isinstance(calculator, ClassicalHazardCalculator))
            self.assertEqual(1, grc_mock.call_count)
Example #48
    def test_actions_after_a_critical_message(self):
        # the job process is running
        self.is_pid_running.return_value = True

        with patch("openquake.supervising."
                   "supervisor.SupervisorLogMessageConsumer.run") as run:

            def run_(mc):
                record = logging.LogRecord("oq.job.123", logging.CRITICAL,
                                           "path", 42, "a msg", (), None)
                mc.log_callback(record)
                assert mc._stopped

            # the supervisor will receive a msg
            run.side_effect = run_

            supervisor.supervise(1, 123, timeout=0.1)

            # the job process is terminated
            self.assertEqual(1, self.terminate_job.call_count)
            self.assertEqual(((1,), {}), self.terminate_job.call_args)

            # stop time is recorded
            self.assertEqual(1, self.record_job_stop_time.call_count)
            self.assertEqual(((123,), {}), self.record_job_stop_time.call_args)

            # the cleanup is triggered
            self.assertEqual(1, self.cleanup_after_job.call_count)
            self.assertEqual(((123,), {}), self.cleanup_after_job.call_args)

            # the status in the job record is updated
            self.assertEqual(1,
                             self.update_job_status_and_error_msg.call_count)
            self.assertEqual(((123, "failed", "a msg"), {}),
                             self.update_job_status_and_error_msg.call_args)
Example #49
    def test_get_with_empty_section_data(self):
        """config.get() returns `None` if the section data dict is empty."""
        with patch('openquake.utils.config.get_section') as mock:
            mock.return_value = dict()
            self.assertTrue(config.get("whatever", "key") is None)
            self.assertEqual(1, mock.call_count)
            self.assertEqual([("whatever",), {}], mock.call_args)
Example #50
    def test_initialize_site_model_no_site_model(self):
        patch_path = ('openquake.engine.calculators.hazard.general'
                      '.store_site_model')
        with helpers.patch(patch_path) as store_sm_patch:
            self.calc.initialize_site_model()
            # We should never try to store a site model in this case.
            self.assertEqual(0, store_sm_patch.call_count)
Example #51
    def test_loss_map_not_serialized_unless_conditional_loss_poes(self):
        with helpers.patch(
            'openquake.output.risk.create_loss_map_writer') as clw:

            clw.return_value = None

            self.calculator.execute()
            self.assertFalse(clw.called)
Example #52
    def test_compute_uhs_task_calls_compute_and_write(self):
        # The celery task `compute_uhs_task` basically just calls a few other
        # functions to do the calculation and write results. Those functions
        # have their own test coverage; in this test, we just want to make
        # sure they get called.

        cmpt_uhs = '%s.%s' % (self.UHS_CORE_MODULE, 'compute_uhs')
        write_uhs_data = '%s.%s' % (self.UHS_CORE_MODULE,
                                    'write_uhs_spectrum_data')
        with helpers.patch(cmpt_uhs) as compute_mock:
            with helpers.patch(write_uhs_data) as write_mock:
                # Call the function under test as a normal function, not a
                # @task:
                compute_uhs_task(self.job_id, 0, Site(0.0, 0.0))

                self.assertEqual(1, compute_mock.call_count)
                self.assertEqual(1, write_mock.call_count)
Example #53
    def test_pk_inc_with_non_existent_debug_key(self):
        """`KeyError` is raised for debug keys that are not in `STATS_KEYS`."""
        job_id = 86
        pkey = "How hard can it be!?"
        stats.delete_job_counters(job_id)
        with helpers.patch(
                "openquake.utils.stats.debug_stats_enabled") as dse:
            dse.return_value = False
            self.assertRaises(KeyError, stats.pk_inc, job_id, pkey)
Example #54
    def setUp(self):
        self.job_from_file = engine._job_from_file
        self.init_logs_amqp_send = patch(
            'openquake.logs.init_logs_amqp_send')
        self.init_logs_amqp_send.start()
        self.job = engine.prepare_job()
        self.job_profile, self.params, self.sections = (
            engine.import_job_profile(helpers.get_data_path(CONFIG_FILE),
                                      self.job))
Example #55
    def test_initialize_site_model_no_site_model(self):
        patch_path = ('openquake.engine.calculators.hazard.general'
                      '.store_site_model')

        with helpers.patch(patch_path) as store_sm_patch:
            self.calc.initialize_site_model()
            # We should never try to store a site model in this case.
            self.assertEqual(0, store_sm_patch.call_count)
Example #56
    def test_not_configured(self):
        """
        The hazard block size was not set in openquake.cfg, so the
        default is returned.
        """
        with patch("openquake.engine.utils.config.get") as mget:
            mget.return_value = None
            self.assertEqual(8192, config.hazard_block_size())
Example #57
    def test_not_configured_default_overriden(self):
        """
        The hazard block size was not set in openquake.cfg, so the
        default specified by the caller is returned.
        """
        with patch("openquake.engine.utils.config.get") as mget:
            mget.return_value = None
            self.assertEqual(333, config.hazard_block_size(333))
Example #58
    def test_configuration_invalid(self):
        """
        The hazard block size *was* configured in openquake.cfg but
        the setting is not a valid number.
        """
        with patch("openquake.engine.utils.config.get") as mget:
            mget.return_value = "not a number"
            self.assertRaises(ValueError, config.hazard_block_size)
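For completeness, the happy path of this small family of tests would patch config.get to return a valid setting. The following is only a hedged sketch: it assumes hazard_block_size() parses the raw string with int(), which the ValueError test above suggests but does not prove, and the value 4096 is purely illustrative.

    def test_configured_with_valid_value(self):
        """A valid hazard block size from openquake.cfg is honoured."""
        with patch("openquake.engine.utils.config.get") as mget:
            mget.return_value = "4096"  # hypothetical setting
            self.assertEqual(4096, config.hazard_block_size())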