def _create_job_profiles(self, user_name):
        uhs_cfg = helpers.demo_file('uhs/config.gem')
        job = engine.prepare_job()
        self.uhs_jp, _, _ = engine.import_job_profile(uhs_cfg, job,
                                                      user_name=user_name)

        cpsha_cfg = helpers.demo_file('classical_psha_based_risk/config.gem')
        job = engine.prepare_job()
        self.cpsha_jp, _, _ = engine.import_job_profile(cpsha_cfg, job,
                                                        user_name=user_name)
Example #2
    def _create_job_profiles(self, user_name):
        uhs_cfg = helpers.demo_file('uhs/config.gem')
        job = engine.prepare_job()
        self.uhs_jp, _, _ = engine.import_job_profile(uhs_cfg,
                                                      job,
                                                      user_name=user_name)

        cpsha_cfg = helpers.demo_file('classical_psha_based_risk/config.gem')
        job = engine.prepare_job()
        self.cpsha_jp, _, _ = engine.import_job_profile(cpsha_cfg,
                                                        job,
                                                        user_name=user_name)
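Nearly every snippet on this page follows the same three-step pattern: engine.prepare_job() creates the job record, engine.import_job_profile() loads a config.gem file and returns a (profile, params, sections) triple, and a JobContext ties the pieces together. A condensed sketch of that shared flow (cfg_path stands in for any of the config paths used below):

    job = engine.prepare_job()
    jp, params, sections = engine.import_job_profile(cfg_path, job)
    job_ctxt = engine.JobContext(params, job.id, sections=sections,
                                 oq_job_profile=jp, oq_job=job)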
Example #3
 def test__db_cnode_status_and_two_jobs(self):
     job1 = engine.prepare_job()
     for node, status in [("P1", "up"), ("P2", "down"), ("P3", "down")]:
         ns = models.CNodeStats(oq_job=job1, node=node,
                                current_status=status)
         ns.save(using="job_superv")
     job2 = engine.prepare_job()
     expected = {}
     for node, status in [("Q2", "down"), ("Q3", "down")]:
         ns = models.CNodeStats(oq_job=job2, node=node,
                                current_status=status)
         ns.save(using="job_superv")
         expected[node] = ns
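     # Only the stats recorded for job2 should be returned; job1's nodes
     # must not appear in the result.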
     self.assertEqual(expected, monitor._db_cnode_status(job2.id))
Example #4
    def test_calculator_for_task(self):
        """Load up a sample calculation (into the db and cache) and make sure
        we can instantiate the correct calculator for a given calculation id.
        """
        from openquake.calculators.hazard.classical.core import (
            ClassicalHazardCalculator)
        job = engine.prepare_job()
        job_profile, params, sections = engine.import_job_profile(demo_file(
            'simple_fault_demo_hazard/config.gem'), job)

        job_ctxt = engine.JobContext(params, job.id,
                                     oq_job_profile=job_profile,
                                     oq_job=job)
        job_ctxt.to_kvs()

        with patch(
            'openquake.utils.tasks.get_running_job') as grc_mock:

            # Loading of the JobContext is done by
            # `get_running_job`, which is covered by other tests.
            # So, we just want to make sure that it's called here.
            grc_mock.return_value = job_ctxt

            calculator = tasks.calculator_for_task(job.id, 'hazard')

            self.assertTrue(isinstance(calculator, ClassicalHazardCalculator))
            self.assertEqual(1, grc_mock.call_count)
Example #5
    def setup_classic_job(cls, create_job_path=True, upload_id=None,
                          inputs=None):
        """Create a classic job with associated upload and inputs.

        :param bool create_job_path: if set, the path for the job will be
            created and captured in the job record
        :param integer upload_id: if set, use the upload record with the
            given db key.
        :param list inputs: a list of 2-tuples where the first and the second
            element are the input type and path respectively
        :returns: a :py:class:`db.models.OqJob` instance
        """
        assert upload_id is None  # temporary

        job = engine.prepare_job()
        oqjp = cls.setup_job_profile(job)
        models.Job2profile(oq_job=job, oq_job_profile=oqjp).save()

        # Insert input model files
        if inputs:
            insert_inputs(job, inputs)

        if create_job_path:
            job.path = os.path.join(tempfile.mkdtemp(), str(job.id))
            job.save()

            os.mkdir(job.path)
            os.chmod(job.path, 0777)

        return job
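A hedged usage sketch for the helper above (the exposure path is purely illustrative; insert_inputs and setup_job_profile are assumed to be sibling helpers, as the body suggests):

    job = cls.setup_classic_job(
        inputs=[("exposure", "/path/to/exposure.xml")])
    # create_job_path defaults to True, so job.path now exists on disk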
Example #6
    def setUpClass(cls):
        cls.job = engine.prepare_job()
        jp, _, _ = engine.import_job_profile(RISK_DEMO_CONFIG_FILE, cls.job)

        cls.job_ctxt = helpers.create_job({}, job_id=cls.job.id,
                                          oq_job_profile=jp, oq_job=cls.job)
        calc = ClassicalRiskCalculator(cls.job_ctxt)

        calc.store_exposure_assets()
        [input] = models.inputs4job(cls.job.id, input_type="exposure")
        model = input.model()
        assets = model.exposuredata_set.filter(taxonomy="af/ctc-D/LR")
        # Add some more assets.
        coos = [(10.000155392289116, 46.546194318563),
                (10.222034128255, 46.0071299176413),
                (10.520376165581, 46.247463385278)]
        for lat, lon in coos:
            site = shapes.Site(lat, lon)
            cls.sites.append(site)
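            # The site is always recorded; new assets are only created when
            # the exposure model did not already contain any.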
            if assets:
                continue
            location = geos.GEOSGeometry(site.point.to_wkt())
            asset = models.ExposureData(
                exposure_model=model, taxonomy="af/ctc-D/LR",
                asset_ref=helpers.random_string(6), stco=lat * 2,
                site=location, reco=1.1 * lon)
            asset.save()
Example #7
    def setUpClass(cls):
        cls.job = engine.prepare_job()
        jp, _, _ = engine.import_job_profile(RISK_DEMO_CONFIG_FILE, cls.job)

        cls.job_ctxt = helpers.create_job({},
                                          job_id=cls.job.id,
                                          oq_job_profile=jp,
                                          oq_job=cls.job)
        calc = ClassicalRiskCalculator(cls.job_ctxt)

        calc.store_exposure_assets()
        [input] = models.inputs4job(cls.job.id, input_type="exposure")
        model = input.model()
        assets = model.exposuredata_set.filter(taxonomy="af/ctc-D/LR")
        # Add some more assets.
        coos = [(10.000155392289116, 46.546194318563),
                (10.222034128255, 46.0071299176413),
                (10.520376165581, 46.247463385278)]
        for lat, lon in coos:
            site = shapes.Site(lat, lon)
            cls.sites.append(site)
            if assets:
                continue
            location = geos.GEOSGeometry(site.point.to_wkt())
            asset = models.ExposureData(exposure_model=model,
                                        taxonomy="af/ctc-D/LR",
                                        asset_ref=helpers.random_string(6),
                                        stco=lat * 2,
                                        site=location,
                                        reco=1.1 * lon)
            asset.save()
Example #8
    def test_calculator_for_task(self):
        """Load up a sample calculation (into the db and cache) and make sure
        we can instantiate the correct calculator for a given calculation id.
        """
        from openquake.calculators.hazard.classical.core import (
            ClassicalHazardCalculator)
        job = engine.prepare_job()
        job_profile, params, sections = engine.import_job_profile(
            demo_file('simple_fault_demo_hazard/config.gem'), job)

        job_ctxt = engine.JobContext(params,
                                     job.id,
                                     oq_job_profile=job_profile,
                                     oq_job=job)
        job_ctxt.to_kvs()

        with patch('openquake.utils.tasks.get_running_job') as grc_mock:

            # Loading of the JobContext is done by
            # `get_running_job`, which is covered by other tests.
            # So, we just want to make sure that it's called here.
            grc_mock.return_value = job_ctxt

            calculator = tasks.calculator_for_task(job.id, 'hazard')

            self.assertTrue(isinstance(calculator, ClassicalHazardCalculator))
            self.assertEqual(1, grc_mock.call_count)
Example #9
    def setUp(self):
        # Test 'event-based' job
        cfg_path = helpers.testdata_path("simplecase/config.gem")

        self.job = engine.prepare_job()
        self.jp, self.params, self.sections = engine.import_job_profile(
            cfg_path, self.job)
Example #10
    def setUpClass(cls):
        cls.job = engine.prepare_job()
        jp, _, _ = engine.import_job_profile(RISK_DEMO_CONFIG_FILE, cls.job)
        calc_proxy = helpers.create_job({}, job_id=cls.job.id,
                oq_job_profile=jp, oq_job=cls.job)

        # storing the basic exposure model
        ClassicalRiskCalculator(calc_proxy).store_exposure_assets()
        [input] = models.inputs4job(cls.job.id, input_type="exposure")
        model = input.model()
        assets = model.exposuredata_set.filter(taxonomy="aa/aatc-D/LR")

        if not assets:
            # This model did not exist in the database before.
            site = shapes.Site(1.0, 2.0)
            # more assets at same location
            models.ExposureData(
                exposure_model=model, taxonomy="aa/aatc-D/LR",
                asset_ref="ASSET_1", stco=1,
                site=geos.GEOSGeometry(site.point.to_wkt()), reco=1).save()

            models.ExposureData(
                exposure_model=model, taxonomy="aa/aatc-D/LR",
                asset_ref="ASSET_2", stco=1,
                site=geos.GEOSGeometry(site.point.to_wkt()), reco=1).save()

            site = shapes.Site(2.0, 2.0)
            # just one asset at location
            models.ExposureData(
                exposure_model=model, taxonomy="aa/aatc-D/LR",
                asset_ref="ASSET_3", stco=1,
                site=geos.GEOSGeometry(site.point.to_wkt()), reco=1).save()
Example #11
    def setUp(self):
        client = kvs.get_client()

        # Delete managed job id info so we can predict the job key
        # which will be allocated for us
        client.delete(kvs.tokens.CURRENT_JOBS)

        self.generated_files = []

        job = engine.prepare_job()
        jp, params, sections = import_job_profile(helpers.get_data_path(
            CONFIG_FILE), job)
        self.job_ctxt = JobContext(
            params, job.id, sections=sections, oq_job_profile=jp, oq_job=job)

        job = engine.prepare_job()
        jp, params, sections = import_job_profile(helpers.get_data_path(
            CONFIG_WITH_INCLUDES), job)
        self.job_ctxt_with_includes = JobContext(
            params, job.id, sections=sections, oq_job_profile=jp, oq_job=job)
Example #12
    def setUp(self):
        # Test 'event-based' job
        cfg_path = helpers.testdata_path("simplecase/config.gem")

        self.job = engine.prepare_job()
        self.jp, self.params, self.sections = engine.import_job_profile(
            cfg_path, self.job)
Example #13
    def setUp(self):
        kvs.get_client().flushall()

        base_path = helpers.testdata_path("scenario")
        job = engine.prepare_job()
        self.job_profile, self.params, self.sections = (
            engine.import_job_profile(SCENARIO_SMOKE_TEST, job))
        self.job_ctxt = JobContext(self.params,
                                   job.id,
                                   sections=self.sections,
                                   base_path=base_path,
                                   oq_job_profile=self.job_profile,
                                   oq_job=job)

        self.job_ctxt.params[NUMBER_OF_CALC_KEY] = "1"

        self.job_ctxt.params['SERIALIZE_RESULTS_TO'] = 'xml'
        self.job_ctxt.serialize_results_to = ["xml"]

        # saving the default java implementation
        self.default = (
            scenario.ScenarioHazardCalculator.compute_ground_motion_field)

        self.grid = self.job_ctxt.region.grid

        self.job_ctxt.to_kvs()
Example #14
    def setUpClass(cls):
        cls.job = engine.prepare_job()
        jp, _, _ = engine.import_job_profile(RISK_DEMO_CONFIG_FILE, cls.job)
        calc_proxy = helpers.create_job({}, job_id=cls.job.id,
                oq_job_profile=jp, oq_job=cls.job)

        # storing the basic exposure model
        ClassicalRiskCalculator(calc_proxy).store_exposure_assets()

        [em_input] = models.inputs4job(cls.job.id, input_type="exposure")
        [model] = em_input.exposuremodel_set.all()

        site = shapes.Site(1.0, 2.0)

        # more assets at same location
        models.ExposureData(
            exposure_model=model, taxonomy="NOT_USED",
            asset_ref="ASSET_1", stco=1,
            site=geos.GEOSGeometry(site.point.to_wkt()), reco=1).save()

        models.ExposureData(
            exposure_model=model, taxonomy="NOT_USED",
            asset_ref="ASSET_2", stco=1,
            site=geos.GEOSGeometry(site.point.to_wkt()), reco=1).save()

        site = shapes.Site(2.0, 2.0)

        # just one asset at location
        models.ExposureData(
            exposure_model=model, taxonomy="NOT_USED",
            asset_ref="ASSET_3", stco=1,
            site=geos.GEOSGeometry(site.point.to_wkt()), reco=1).save()
Example #15
 def test__db_cnode_status_and_two_jobs(self):
     job1 = engine.prepare_job()
     for node, status in [("P1", "up"), ("P2", "down"), ("P3", "down")]:
         ns = models.CNodeStats(oq_job=job1,
                                node=node,
                                current_status=status)
         ns.save(using="job_superv")
     job2 = engine.prepare_job()
     expected = {}
     for node, status in [("Q2", "down"), ("Q3", "down")]:
         ns = models.CNodeStats(oq_job=job2,
                                node=node,
                                current_status=status)
         ns.save(using="job_superv")
         expected[node] = ns
     self.assertEqual(expected, monitor._db_cnode_status(job2.id))
Example #16
 def test__db_cnode_status_and_wrong_job_id(self):
     job = engine.prepare_job()
     expected = {}
     for node, status in [("O1", "up"), ("O2", "down"), ("O3", "down")]:
         ns = models.CNodeStats(oq_job=job, node=node,
                                current_status=status)
         ns.save(using="job_superv")
     self.assertEqual(expected, monitor._db_cnode_status(-1))
Example #17
 def setUp(self):
     self.job_from_file = engine._job_from_file
     self.init_logs_amqp_send = patch('openquake.logs.init_logs_amqp_send')
     self.init_logs_amqp_send.start()
     self.job = engine.prepare_job()
     self.job_profile, self.params, self.sections = (
         engine.import_job_profile(helpers.get_data_path(CONFIG_FILE),
                                   self.job))
Example #18
 def setUp(self):
     self.job_from_file = engine._job_from_file
     self.init_logs_amqp_send = patch('openquake.logs.init_logs_amqp_send')
     self.init_logs_amqp_send.start()
     self.job = engine.prepare_job()
     self.job_profile, self.params, self.sections = (
         engine.import_job_profile(helpers.get_data_path(CONFIG_FILE),
                                   self.job))
Example #19
 def test__db_cnode_status(self):
     job = engine.prepare_job()
     expected = {}
     for node, status in [("N1", "up"), ("N2", "down"), ("N3", "down")]:
         ns = models.CNodeStats(oq_job=job, node=node,
                                current_status=status)
         ns.save(using="job_superv")
         expected[node] = ns
     self.assertEqual(expected, monitor._db_cnode_status(job.id))
Example #20
 def test__db_cnode_status_and_wrong_job_id(self):
     job = engine.prepare_job()
     expected = {}
     for node, status in [("O1", "up"), ("O2", "down"), ("O3", "down")]:
         ns = models.CNodeStats(oq_job=job,
                                node=node,
                                current_status=status)
         ns.save(using="job_superv")
     self.assertEqual(expected, monitor._db_cnode_status(-1))
Example #21
 def test__db_cnode_status(self):
     job = engine.prepare_job()
     expected = {}
     for node, status in [("N1", "up"), ("N2", "down"), ("N3", "down")]:
         ns = models.CNodeStats(oq_job=job,
                                node=node,
                                current_status=status)
         ns.save(using="job_superv")
         expected[node] = ns
     self.assertEqual(expected, monitor._db_cnode_status(job.id))
Example #22
    def setUp(self):
        # Test 'event-based' job
        cfg_path = helpers.testdata_path("simplecase/config.gem")
        base_path = helpers.testdata_path("simplecase")

        oq_job = engine.prepare_job()
        oq_job_profile, params, sections = engine.import_job_profile(cfg_path, oq_job)

        self.eb_job = JobContext(
            params, oq_job.id, sections=sections, base_path=base_path, oq_job_profile=oq_job_profile, oq_job=oq_job
        )
Example #23
    def setUp(self):
        self.job = engine.prepare_job()
        self.job_profile, params, sections = engine.import_job_profile(
            UHS_DEMO_CONFIG_FILE, self.job)

        self.job_ctxt = engine.JobContext(
            params, self.job.id, sections=sections,
            serialize_results_to=['db'], oq_job_profile=self.job_profile,
            oq_job=self.job)
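        # Cache the context in the KVS so it can be looked up by job id.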
        self.job_ctxt.to_kvs()
        self.job_id = self.job_ctxt.job_id
Example #24
    def setUp(self):
        client = kvs.get_client()

        # Delete managed job id info so we can predict the job key
        # which will be allocated for us
        client.delete(kvs.tokens.CURRENT_JOBS)

        self.generated_files = []

        job = engine.prepare_job()
        jp, params, sections = import_job_profile(helpers.get_data_path(
            CONFIG_FILE), job)
        self.job_ctxt = JobContext(
            params, job.id, sections=sections, oq_job_profile=jp, oq_job=job)

        job = engine.prepare_job()
        jp, params, sections = import_job_profile(helpers.get_data_path(
            CONFIG_WITH_INCLUDES), job)
        self.job_ctxt_with_includes = JobContext(
            params, job.id, sections=sections, oq_job_profile=jp, oq_job=job)
Example #25
    def setUp(self):
        self.job = engine.prepare_job()
        self.job_profile, params, sections = engine.import_job_profile(
            UHS_DEMO_CONFIG_FILE, self.job)

        self.job_ctxt = engine.JobContext(
            params, self.job.id, sections=sections,
            serialize_results_to=['db'], oq_job_profile=self.job_profile,
            oq_job=self.job)
        self.job_ctxt.to_kvs()
        self.job_id = self.job_ctxt.job_id
Example #26
    def setUp(self):
        # Test 'event-based' job
        cfg_path = helpers.testdata_path("simplecase/config.gem")
        base_path = helpers.testdata_path("simplecase")

        oq_job = engine.prepare_job()
        oq_job_profile, params, sections = engine.import_job_profile(
            cfg_path, oq_job)

        self.eb_job = JobContext(
            params, oq_job.id, sections=sections, base_path=base_path,
            oq_job_profile=oq_job_profile, oq_job=oq_job)
Example #27
    def setUp(self):
        self.job = engine.prepare_job()
        self.job_profile, self.params, _sections = (engine.import_job_profile(
            demo_file('simple_fault_demo_hazard/config.gem'), self.job))

        self.params['debug'] = 'warn'

        # Cache the calc proxy data into the kvs:
        job_ctxt = engine.JobContext(self.params,
                                     self.job.id,
                                     oq_job_profile=self.job_profile,
                                     oq_job=self.job)
        job_ctxt.to_kvs()
Example #28
    def setUp(self):
        self.job = engine.prepare_job()
        self.job_profile, self.params, _sections = (
            engine.import_job_profile(demo_file(
                'simple_fault_demo_hazard/config.gem'), self.job))

        self.params['debug'] = 'warn'

        # Cache the calc proxy data into the kvs:
        job_ctxt = engine.JobContext(
            self.params, self.job.id, oq_job_profile=self.job_profile,
            oq_job=self.job)
        job_ctxt.to_kvs()
Example #29
    def test__serialize_xml_filenames(self):
        # Test that the file names of the loss XML artifacts are correct.
        # See https://bugs.launchpad.net/openquake/+bug/894706.
        expected_lrc_file_name = (
            'losscurves-block-#%(job_id)s-block#%(block)s.xml')
        expected_lr_file_name = (
            'losscurves-loss-block-#%(job_id)s-block#%(block)s.xml')

        cfg_file = helpers.demo_file('classical_psha_based_risk/config.gem')

        job = engine.prepare_job()
        job_profile, params, sections = engine.import_job_profile(
            cfg_file, job)

        job_ctxt = engine.JobContext(params,
                                     job.id,
                                     sections=sections,
                                     serialize_results_to=['xml', 'db'],
                                     oq_job_profile=job_profile,
                                     oq_job=job)

        calculator = ClassicalRiskCalculator(job_ctxt)

        with helpers.patch('openquake.writer.FileWriter.serialize'):
            # The 'curves' key in the kwargs just needs to be present;
            # because of the serialize mock in place above, it doesn't need
            # to have a real value.

            # First, we test loss ratio curve output,
            # then we'll do the same test for loss curve output.

            # We expect to get a single file path back.
            [file_path] = calculator._serialize(
                0, **dict(curve_mode='loss_ratio', curves=[]))

            _dir, file_name = os.path.split(file_path)

            self.assertEqual(
                expected_lrc_file_name % dict(job_id=job.id, block=0),
                file_name)

            # The same test again, except for loss curves this time.
            [file_path] = calculator._serialize(
                0, **dict(curve_mode='loss', curves=[]))

            _dir, file_name = os.path.split(file_path)

            self.assertEqual(
                expected_lr_file_name % dict(job_id=job.id, block=0),
                file_name)
Example #30
    def test__serialize_xml_filenames(self):
        # Test that the file names of the loss XML artifacts are correct.
        # See https://bugs.launchpad.net/openquake/+bug/894706.
        expected_lrc_file_name = (
            'losscurves-block-#%(job_id)s-block#%(block)s.xml')
        expected_lr_file_name = (
            'losscurves-loss-block-#%(job_id)s-block#%(block)s.xml')

        cfg_file = helpers.demo_file('classical_psha_based_risk/config.gem')

        job = engine.prepare_job()
        job_profile, params, sections = engine.import_job_profile(
            cfg_file, job)

        job_ctxt = engine.JobContext(
            params, job.id, sections=sections,
            serialize_results_to=['xml', 'db'], oq_job_profile=job_profile,
            oq_job=job)

        calculator = ClassicalRiskCalculator(job_ctxt)

        with helpers.patch('openquake.writer.FileWriter.serialize'):
            # The 'curves' key in the kwargs just needs to be present;
            # because of the serialize mock in place above, it doesn't need
            # to have a real value.

            # First, we test loss ratio curve output,
            # then we'll do the same test for loss curve output.

            # We expect to get a single file path back.
            [file_path] = calculator._serialize(
                0, **dict(curve_mode='loss_ratio', curves=[]))

            _dir, file_name = os.path.split(file_path)

            self.assertEqual(
                expected_lrc_file_name % dict(job_id=job.id,
                                              block=0),
                file_name)

            # The same test again, except for loss curves this time.
            [file_path] = calculator._serialize(
                0, **dict(curve_mode='loss', curves=[]))

            _dir, file_name = os.path.split(file_path)

            self.assertEqual(
                expected_lr_file_name % dict(job_id=job.id,
                                             block=0),
                file_name)
Example #31
    def test_get_site_model(self):
        job = engine.prepare_job()
        site_model_inp = models.Input(
            owner=job.owner, digest='fake', path='fake',
            input_type='site_model', size=0
        )
        site_model_inp.save()

        # The link has not yet been made in the input2job table.
        self.assertIsNone(general.get_site_model(job.id))

        # Complete the link:
        models.Input2job(input=site_model_inp, oq_job=job).save()

        actual_site_model = general.get_site_model(job.id)
        self.assertEqual(site_model_inp, actual_site_model)
Example #32
    def test_get_site_model(self):
        job = engine.prepare_job()
        site_model_inp = models.Input(
            owner=job.owner, digest='fake', path='fake',
            input_type='site_model', size=0
        )
        site_model_inp.save()

        # The link has not yet been made in the input2job table.
        self.assertIsNone(general.get_site_model(job.id))

        # Complete the link:
        models.Input2job(input=site_model_inp, oq_job=job).save()

        actual_site_model = general.get_site_model(job.id)
        self.assertEqual(site_model_inp, actual_site_model)
Example #33
    def setUp(self):
        cfg_path = helpers.demo_file(
            'probabilistic_event_based_risk/config.gem')

        job = engine.prepare_job()
        jp, params, sections = engine.import_job_profile(cfg_path, job)

        job_ctxt = engine.JobContext(
            params, 1, sections=sections, base_path='/tmp',
            serialize_results_to=['db', 'xml'], oq_job_profile=jp, oq_job=job)
        job_ctxt.blocks_keys = []

        self.calculator = EventBasedRiskCalculator(job_ctxt)
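        # Swap the heavyweight data-loading steps for no-op stubs.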
        self.calculator.store_exposure_assets = lambda: None
        self.calculator.store_vulnerability_model = lambda: None
        self.calculator.partition = lambda: None
Example #34
def main():
    arg_parser = set_up_arg_parser()
    args = arg_parser.parse_args()

    if args.version:
        print utils_version.info(__version__)
    elif args.config_file is not None:
        from openquake import job
        from openquake import engine

        try:
            if args.log_file is not None:
                # Capture logging messages to a file.
                try:
                    _touch_log_file(args.log_file)
                except IOError as e:
                    raise IOError("Error writing to log file %s: %s" % (args.log_file, e.strerror))

            user_name = getpass.getuser()
            ajob = engine.prepare_job(user_name)
            _, params, sections = engine.import_job_profile(args.config_file, ajob, user_name, args.force_inputs)
            engine.run_job(
                ajob,
                params,
                sections,
                output_type=args.output_type,
                log_level=args.log_level,
                force_inputs=args.force_inputs,
                log_file=args.log_file,
            )
        except job.config.ValidationException as e:
            print str(e)
        except IOError as e:
            print str(e)
        except Exception as e:
            raise
    elif args.list_calculations:
        list_calculations()
    elif args.list_outputs is not None:
        list_outputs(args.list_outputs)
    elif args.export is not None:
        output_id, target_dir = args.export
        output_id = int(output_id)

        do_export(output_id, target_dir)
    else:
        arg_parser.print_usage()
Example #35
    def test_write_output(self):
        # Test that the loss map writers are properly called when
        # write_output is invoked.
        cfg_file = helpers.demo_file('classical_psha_based_risk/config.gem')

        job = engine.prepare_job()
        job_profile, params, sections = engine.import_job_profile(
            cfg_file, job)

        # Set conditional loss poe so that loss maps are created.
        # If this parameter is not specified, no loss maps will be serialized
        # at the end of the job.
        params['CONDITIONAL_LOSS_POE'] = '0.01'
        job_profile.conditional_loss_poe = [0.01]
        job_profile.save()

        job_ctxt = engine.JobContext(params,
                                     job.id,
                                     sections=sections,
                                     serialize_results_to=['xml', 'db'],
                                     oq_job_profile=job_profile,
                                     oq_job=job)

        calculator = ClassicalRiskCalculator(job_ctxt)

        # Mock the composed loss map serializer:
        with helpers.patch('openquake.writer.CompositeWriter'
                           '.serialize') as writer_mock:
            calculator.write_output()

            self.assertEqual(1, writer_mock.call_count)

            # Now test that the composite writer got the correct
            # 'serialize to' instructions. The composite writer should have
            # 1 DB and 1 XML loss map serializer:
            composite_writer = writer_mock.call_args[0][0]
            writers = composite_writer.writers

            self.assertEqual(2, len(writers))
            # We don't assume anything about the order of the writers,
            # and we don't care anyway in this test:
            self.assertTrue(
                any(isinstance(w, LossMapDBWriter) for w in writers))
            self.assertTrue(
                any(
                    isinstance(w, LossMapNonScenarioXMLWriter)
                    for w in writers))
Example #36
    def setUp(self):
        cfg_path = helpers.demo_file(
            'probabilistic_event_based_risk/config.gem')

        job = engine.prepare_job()
        jp, params, sections = engine.import_job_profile(cfg_path, job)

        job_ctxt = engine.JobContext(
            params, 1, sections=sections, base_path='/tmp',
            serialize_results_to=['db', 'xml'], oq_job_profile=jp, oq_job=job)
        job_ctxt.blocks_keys = []

        self.calculator = EventBasedRiskCalculator(job_ctxt)
        self.calculator.store_exposure_assets = lambda: None
        self.calculator.store_fragility_model = lambda: None
        self.calculator.store_vulnerability_model = lambda: None
        self.calculator.partition = lambda: None
Example #37
def main():
    arg_parser = set_up_arg_parser()
    args = arg_parser.parse_args()

    if args.version:
        print utils_version.info(__version__)
    elif args.config_file is not None:
        from openquake import job
        from openquake import engine
        try:
            if args.log_file is not None:
                # Capture logging messages to a file.
                try:
                    _touch_log_file(args.log_file)
                except IOError as e:
                    raise IOError('Error writing to log file %s: %s' %
                                  (args.log_file, e.strerror))

            user_name = getpass.getuser()
            ajob = engine.prepare_job(user_name)
            _, params, sections = engine.import_job_profile(
                args.config_file, ajob, user_name, args.force_inputs)
            engine.run_job(ajob,
                           params,
                           sections,
                           output_type=args.output_type,
                           log_level=args.log_level,
                           force_inputs=args.force_inputs,
                           log_file=args.log_file)
        except job.config.ValidationException as e:
            print str(e)
        except IOError as e:
            print str(e)
        except Exception as e:
            raise
    elif args.list_calculations:
        list_calculations()
    elif args.list_outputs is not None:
        list_outputs(args.list_outputs)
    elif args.export is not None:
        output_id, target_dir = args.export
        output_id = int(output_id)

        do_export(output_id, target_dir)
    else:
        arg_parser.print_usage()
Example #38
    def test__launch_job_calls_core_calc_methods(self):
        # The `Calculator` interface defines 4 general methods:
        # - initialize
        # - pre_execute
        # - execute
        # - post_execute
        # When `_launch_job` is called, each of these methods should be
        # called once per job type (hazard, risk).

        # Calculation setup:
        cfg_file = helpers.demo_file('classical_psha_based_risk/config.gem')

        job = engine.prepare_job()
        job_profile, params, sections = engine.import_job_profile(
            cfg_file, job)

        job_ctxt = engine.JobContext(
            params, job.id, sections=sections,
            serialize_results_to=['xml', 'db'],
            oq_job_profile=job_profile, oq_job=job)

        # Mocking setup:
        cls_haz_calc = ('openquake.calculators.hazard.classical.core'
                        '.ClassicalHazardCalculator')
        cls_risk_calc = ('openquake.calculators.risk.classical.core'
                         '.ClassicalRiskCalculator')
        methods = ('initialize', 'pre_execute', 'execute', 'post_execute')
        haz_patchers = [helpers.patch('%s.%s' % (cls_haz_calc, m))
                        for m in methods]
        risk_patchers = [helpers.patch('%s.%s' % (cls_risk_calc, m))
                         for m in methods]

        haz_mocks = [p.start() for p in haz_patchers]
        risk_mocks = [p.start() for p in risk_patchers]

        # Call the function under test:
        engine._launch_job(job_ctxt, sections)

        self.assertTrue(all(x.call_count == 1 for x in haz_mocks))
        self.assertTrue(all(x.call_count == 1 for x in risk_mocks))

        # Tear down the mocks:
        for p in haz_patchers:
            p.stop()
        for p in risk_patchers:
            p.stop()
Example #39
    def test__launch_job_calls_core_calc_methods(self):
        # The `Calculator` interface defines 4 general methods:
        # - initialize
        # - pre_execute
        # - execute
        # - post_execute
        # When `_launch_job` is called, each of these methods should be
        # called once per job type (hazard, risk).

        # Calculation setup:
        cfg_file = helpers.demo_file('classical_psha_based_risk/config.gem')

        job = engine.prepare_job()
        job_profile, params, sections = engine.import_job_profile(
            cfg_file, job)

        job_ctxt = engine.JobContext(
            params, job.id, sections=sections,
            serialize_results_to=['xml', 'db'],
            oq_job_profile=job_profile, oq_job=job)

        # Mocking setup:
        cls_haz_calc = ('openquake.calculators.hazard.classical.core'
                        '.ClassicalHazardCalculator')
        cls_risk_calc = ('openquake.calculators.risk.classical.core'
                         '.ClassicalRiskCalculator')
        methods = ('initialize', 'pre_execute', 'execute', 'post_execute')
        haz_patchers = [helpers.patch('%s.%s' % (cls_haz_calc, m))
                        for m in methods]
        risk_patchers = [helpers.patch('%s.%s' % (cls_risk_calc, m))
                         for m in methods]

        haz_mocks = [p.start() for p in haz_patchers]
        risk_mocks = [p.start() for p in risk_patchers]

        # Call the function under test:
        engine._launch_job(job_ctxt, sections)

        self.assertTrue(all(x.call_count == 1 for x in haz_mocks))
        self.assertTrue(all(x.call_count == 1 for x in risk_mocks))

        # Tear down the mocks:
        for p in haz_patchers:
            p.stop()
        for p in risk_patchers:
            p.stop()
Example #40
def prepare_job_context(path_to_cfg):
    """Given a path to a config file, prepare and return a
    :class:`openquake.engine.JobContext`. This is convenient because it can be
    immediately passed to a calculator constructor.

    This also creates the necessary job and oq_job_profile records.
    """
    job = engine.prepare_job()

    cfg = demo_file(path_to_cfg)

    job_profile, params, sections = engine.import_job_profile(
        cfg, job, force_inputs=True)

    job_ctxt = engine.JobContext(
        params, job.id, sections=sections, oq_job_profile=job_profile,
        oq_job=job)

    return job_ctxt
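A usage sketch for this helper (the demo config is one that appears elsewhere on this page):

    job_ctxt = prepare_job_context('classical_psha_based_risk/config.gem')
    calculator = ClassicalRiskCalculator(job_ctxt)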
Example #41
    def test_write_output(self):
        # Test that the loss map writers are properly called when
        # write_output is invoked.
        cfg_file = helpers.demo_file('classical_psha_based_risk/config.gem')

        job = engine.prepare_job()
        job_profile, params, sections = engine.import_job_profile(
            cfg_file, job)

        # Set conditional loss poe so that loss maps are created.
        # If this parameter is not specified, no loss maps will be serialized
        # at the end of the job.
        params['CONDITIONAL_LOSS_POE'] = '0.01'
        job_profile.conditional_loss_poe = [0.01]
        job_profile.save()

        job_ctxt = engine.JobContext(
            params, job.id, sections=sections,
            serialize_results_to=['xml', 'db'], oq_job_profile=job_profile,
            oq_job=job)

        calculator = ClassicalRiskCalculator(job_ctxt)

        # Mock the composed loss map serializer:
        with helpers.patch('openquake.writer.CompositeWriter'
                           '.serialize') as writer_mock:
            calculator.write_output()

            self.assertEqual(1, writer_mock.call_count)

            # Now test that the composite writer got the correct
            # 'serialize to' instructions. The composite writer should have
            # 1 DB and 1 XML loss map serializer:
            composite_writer = writer_mock.call_args[0][0]
            writers = composite_writer.writers

            self.assertEqual(2, len(writers))
            # We don't assume anything about the order of the writers,
            # and we don't care anyway in this test:
            self.assertTrue(any(
                isinstance(w, LossMapDBWriter) for w in writers))
            self.assertTrue(any(
                isinstance(w, LossMapNonScenarioXMLWriter) for w in writers))
Example #42
def prepare_job_context(path_to_cfg):
    """Given a path to a config file, prepare and return a
    :class:`openquake.engine.JobContext`. This is convenient because it can be
    immediately passed to a calculator constructor.

    This also creates the necessary job and oq_job_profile records.
    """
    job = engine.prepare_job()

    cfg = demo_file(path_to_cfg)

    job_profile, params, sections = engine.import_job_profile(
        cfg, job, force_inputs=True)

    job_ctxt = engine.JobContext(params,
                                 job.id,
                                 sections=sections,
                                 oq_job_profile=job_profile,
                                 oq_job=job)

    return job_ctxt
Example #43
    def setup_classic_job(cls,
                          create_job_path=True,
                          upload_id=None,
                          inputs=None,
                          force_inputs=False,
                          omit_profile=False,
                          user_name="openquake"):
        """Create a classic job with associated upload and inputs.

        :param bool create_job_path: if set, the path for the job will be
            created and captured in the job record
        :param integer upload_id: if set, use the upload record with the
            given db key.
        :param list inputs: a list of 2-tuples where the first and the second
            element are the input type and path respectively
        :param bool force_inputs: If `True` the model input files will be
            parsed and the resulting content written to the database no matter
            what.
        :param bool omit_profile: If `True` no job profile will be created.
        :param str user_name: The name of the user that is running the job.
        :returns: a :py:class:`db.models.OqJob` instance
        """
        assert upload_id is None  # temporary

        job = engine.prepare_job(user_name)
        if not omit_profile:
            oqjp = cls.setup_job_profile(job, force_inputs)
            models.Job2profile(oq_job=job, oq_job_profile=oqjp).save()

        # Insert input model files
        if inputs:
            insert_inputs(job, inputs)

        if create_job_path:
            job.path = os.path.join(tempfile.mkdtemp(), str(job.id))
            job.save()

            os.mkdir(job.path)
            os.chmod(job.path, 0777)

        return job
Example #44
    def setUpClass(cls):
        cls.job = engine.prepare_job()
        jp, _, _ = engine.import_job_profile(RISK_DEMO_CONFIG_FILE, cls.job)
        calc_proxy = helpers.create_job({},
                                        job_id=cls.job.id,
                                        oq_job_profile=jp,
                                        oq_job=cls.job)

        # storing the basic exposure model
        ClassicalRiskCalculator(calc_proxy).store_exposure_assets()
        [input] = models.inputs4job(cls.job.id, input_type="exposure")
        model = input.model()
        assets = model.exposuredata_set.filter(taxonomy="aa/aatc-D/LR")

        if not assets:
            # This model did not exist in the database before.
            site = shapes.Site(1.0, 2.0)
            # more assets at same location
            models.ExposureData(exposure_model=model,
                                taxonomy="aa/aatc-D/LR",
                                asset_ref="ASSET_1",
                                stco=1,
                                site=geos.GEOSGeometry(site.point.to_wkt()),
                                reco=1).save()

            models.ExposureData(exposure_model=model,
                                taxonomy="aa/aatc-D/LR",
                                asset_ref="ASSET_2",
                                stco=1,
                                site=geos.GEOSGeometry(site.point.to_wkt()),
                                reco=1).save()

            site = shapes.Site(2.0, 2.0)
            # just one asset at location
            models.ExposureData(exposure_model=model,
                                taxonomy="aa/aatc-D/LR",
                                asset_ref="ASSET_3",
                                stco=1,
                                site=geos.GEOSGeometry(site.point.to_wkt()),
                                reco=1).save()
Example #45
    def test_get_site_model_too_many_site_models(self):
        job = engine.prepare_job()
        site_model_inp1 = models.Input(
            owner=job.owner, digest='fake', path='fake',
            input_type='site_model', size=0
        )
        site_model_inp1.save()
        site_model_inp2 = models.Input(
            owner=job.owner, digest='fake', path='fake',
            input_type='site_model', size=0
        )
        site_model_inp2.save()

        # link both site models to the job:
        models.Input2job(input=site_model_inp1, oq_job=job).save()
        models.Input2job(input=site_model_inp2, oq_job=job).save()

        with self.assertRaises(RuntimeError) as assert_raises:
            general.get_site_model(job.id)

        self.assertEqual('Only 1 site model per job is allowed, found 2.',
                         assert_raises.exception.message)
Example #46
    def test_get_site_model_too_many_site_models(self):
        job = engine.prepare_job()
        site_model_inp1 = models.Input(
            owner=job.owner, digest='fake', path='fake',
            input_type='site_model', size=0
        )
        site_model_inp1.save()
        site_model_inp2 = models.Input(
            owner=job.owner, digest='fake', path='fake',
            input_type='site_model', size=0
        )
        site_model_inp2.save()

        # link both site models to the job:
        models.Input2job(input=site_model_inp1, oq_job=job).save()
        models.Input2job(input=site_model_inp2, oq_job=job).save()

        with self.assertRaises(RuntimeError) as assert_raises:
            general.get_site_model(job.id)

        self.assertEqual('Only 1 site model per job is allowed, found 2.',
                         assert_raises.exception.message)
Example #47
    def setUp(self):
        kvs.get_client().flushall()

        base_path = helpers.testdata_path("scenario")
        job = engine.prepare_job()
        self.job_profile, self.params, self.sections = (
            engine.import_job_profile(SCENARIO_SMOKE_TEST, job))
        self.job_ctxt = JobContext(
            self.params, job.id, sections=self.sections,
            base_path=base_path, oq_job_profile=self.job_profile,
            oq_job=job)

        self.job_ctxt.params[NUMBER_OF_CALC_KEY] = "1"

        self.job_ctxt.params['SERIALIZE_RESULTS_TO'] = 'xml'

        # saving the default java implementation
        self.default = (
            scenario.ScenarioHazardCalculator.compute_ground_motion_field)

        self.grid = self.job_ctxt.region.grid

        self.job_ctxt.to_kvs()
Example #48
    def setup_classic_job(cls, create_job_path=True, upload_id=None,
                          inputs=None, force_inputs=False, omit_profile=False,
                          user_name="openquake"):
        """Create a classic job with associated upload and inputs.

        :param bool create_job_path: if set, the path for the job will be
            created and captured in the job record
        :param integer upload_id: if set, use the upload record with the
            given db key.
        :param list inputs: a list of 2-tuples where the first and the second
            element are the input type and path respectively
        :param bool force_inputs: If `True` the model input files will be
            parsed and the resulting content written to the database no matter
            what.
        :param bool omit_profile: If `True` no job profile will be created.
        :param str user_name: The name of the user that is running the job.
        :returns: a :py:class:`db.models.OqJob` instance
        """
        assert upload_id is None  # temporary

        job = engine.prepare_job(user_name)
        if not omit_profile:
            oqjp = cls.setup_job_profile(job, force_inputs)
            models.Job2profile(oq_job=job, oq_job_profile=oqjp).save()

        # Insert input model files
        if inputs:
            insert_inputs(job, inputs)

        if create_job_path:
            job.path = os.path.join(tempfile.mkdtemp(), str(job.id))
            job.save()

            os.mkdir(job.path)
            os.chmod(job.path, 0777)

        return job
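Relative to the leaner variant in Example #5, this version adds the force_inputs, omit_profile and user_name knobs. A sketch of a call exercising them (the user name is illustrative):

    job = cls.setup_classic_job(force_inputs=True, user_name="test_user")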
Example #49
 def setUp(self):
     self.job = engine.prepare_job()
     self.job.hazard_calculation = HazardCalculation(no_progress_timeout=99)
Example #50
    def test_generate_hazard_curves_using_classical_psha(self):
        def verify_realization_haz_curves_stored_to_kvs(the_job, keys):
            """ This just tests to make sure there something in the KVS
            for each key in given list of keys. This does NOT test the
            actual results. """
            # TODO (LB): At some point we need to test the actual
            # results to verify they are correct

            realizations = int(the_job.params['NUMBER_OF_LOGIC_TREE_SAMPLES'])

            for realization in xrange(0, realizations):
                for site in the_job.sites_to_compute():
                    key = tokens.hazard_curve_poes_key(the_job.job_id,
                                                       realization, site)
                    self.assertTrue(key in keys, "Missing key %s" % key)

        def verify_mean_haz_curves_stored_to_kvs(the_job, keys):
            """ Make sure that the keys and non-empty values for mean
            hazard curves have been written to KVS."""

            if the_job.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == 'true':

                LOG.debug("verifying KVS entries for mean hazard curves")
                for site in the_job.sites_to_compute():
                    key = tokens.mean_hazard_curve_key(the_job.job_id, site)
                    self.assertTrue(key in keys, "Missing key %s" % key)

        def verify_mean_haz_maps_stored_to_kvs(the_job, calculator, keys):
            """ Make sure that the keys and non-empty values for mean
            hazard maps have been written to KVS."""

            if (the_job.params[hazard_general.POES_PARAM_NAME] != '' and
                the_job.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == \
                'true'):

                LOG.debug("verifying KVS entries for mean hazard maps")

                for poe in calculator.poes_hazard_maps:
                    for site in the_job.sites_to_compute():
                        key = tokens.mean_hazard_map_key(
                            the_job.job_id, site, poe)
                        self.assertTrue(key in keys, "Missing key %s" % key)

        def verify_quantile_haz_curves_stored_to_kvs(the_job, calculator,
                                                     keys):
            """ Make sure that the keys and non-empty values for quantile
            hazard curves have been written to KVS."""

            quantiles = calculator.quantile_levels

            LOG.debug("verifying KVS entries for quantile hazard curves, "\
                "%s quantile values" % len(quantiles))

            for quantile in quantiles:
                for site in the_job.sites_to_compute():
                    key = tokens.quantile_hazard_curve_key(
                        the_job.job_id, site, quantile)
                    self.assertTrue(key in keys, "Missing key %s" % key)

        def verify_quantile_haz_maps_stored_to_kvs(the_job, calculator, keys):
            """ Make sure that the keys and non-empty values for quantile
            hazard maps have been written to KVS."""

            quantiles = calculator.quantile_levels

            if (the_job.params[hazard_general.POES_PARAM_NAME] != ''
                    and len(quantiles) > 0):

                poes = calculator.poes_hazard_maps

                LOG.debug("verifying KVS entries for quantile hazard maps, "\
                    "%s quantile values, %s PoEs" % (
                    len(quantiles), len(poes)))

                for quantile in quantiles:
                    for poe in poes:
                        for site in the_job.sites_to_compute():
                            key = tokens.quantile_hazard_map_key(
                                the_job.job_id, site, poe, quantile)
                            self.assertTrue(key in keys,
                                            "Missing key %s" % key)

        def verify_realization_haz_curves_stored_to_nrml(the_job, calculator):
            """Tests that a NRML file has been written for each realization,
            and that this file validates against the NRML schema.
            Does NOT test if results in NRML file are correct.
            """
            realizations = int(the_job.params['NUMBER_OF_LOGIC_TREE_SAMPLES'])
            for realization in xrange(0, realizations):

                nrml_path = os.path.join(
                    "demos/classical_psha_simple/computed_output",
                    calculator.hazard_curve_filename(realization))

                LOG.debug("validating NRML file %s" % nrml_path)

                self.assertTrue(xml.validates_against_xml_schema(
                    nrml_path, NRML_SCHEMA_PATH),
                    "NRML instance file %s does not validate against schema" \
                    % nrml_path)

        def verify_mean_haz_curves_stored_to_nrml(the_job, calculator):
            """Tests that a mean hazard curve NRML file has been written,
            and that this file validates against the NRML schema.
            Does NOT test if results in NRML file are correct.
            """

            if the_job.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == 'true':
                nrml_path = os.path.join(
                    "demos/classical_psha_simple/computed_output",
                    calculator.mean_hazard_curve_filename())

                LOG.debug("validating NRML file %s" % nrml_path)

                self.assertTrue(xml.validates_against_xml_schema(
                    nrml_path, NRML_SCHEMA_PATH),
                    "NRML instance file %s does not validate against schema" \
                    % nrml_path)

        def verify_mean_haz_maps_stored_to_nrml(the_job):
            """Tests that a mean hazard map NRML file has been written,
            and that this file validates against the NRML schema.
            Does NOT test if results in NRML file are correct.
            """
            if (the_job.params[hazard_general.POES_PARAM_NAME] != '' and
                the_job.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == \
                'true'):

                for poe in calculator.poes_hazard_maps:
                    nrml_path = os.path.join(
                        "demos/classical_psha_simple/computed_output",
                        calculator.mean_hazard_map_filename(poe))

                    LOG.debug("validating NRML file for mean hazard map %s" \
                        % nrml_path)

                    self.assertTrue(xml.validates_against_xml_schema(
                        nrml_path, NRML_SCHEMA_PATH),
                        "NRML instance file %s does not validate against "\
                        "schema" % nrml_path)

        def verify_quantile_haz_curves_stored_to_nrml(the_job, calculator):
            """Tests that quantile hazard curve NRML files have been written,
            and that these files validate against the NRML schema.
            Does NOT test if results in NRML files are correct.
            """

            for quantile in calculator.quantile_levels:

                nrml_path = os.path.join(
                    "demos/classical_psha_simple/computed_output",
                    calculator.quantile_hazard_curve_filename(quantile))

                LOG.debug("validating NRML file for quantile hazard curve: "\
                    "%s" % nrml_path)

                self.assertTrue(xml.validates_against_xml_schema(
                    nrml_path, NRML_SCHEMA_PATH),
                    "NRML instance file %s does not validate against schema" \
                    % nrml_path)

        def verify_quantile_haz_maps_stored_to_nrml(the_job, calculator):
            """Tests that quantile hazard map NRML files have been written,
            and that these files validate against the NRML schema.
            Does NOT test if results in NRML files are correct.
            """

            quantiles = calculator.quantile_levels

            if (the_job.params[hazard_general.POES_PARAM_NAME] != ''
                    and len(quantiles) > 0):

                for poe in calculator.poes_hazard_maps:
                    for quantile in quantiles:
                        nrml_path = os.path.join(
                            "demos/classical_psha_simple/computed_output",
                            calculator.quantile_hazard_map_filename(
                                quantile, poe))

                        LOG.debug("validating NRML file for quantile hazard "\
                            "map: %s" % nrml_path)

                        self.assertTrue(xml.validates_against_xml_schema(
                            nrml_path, NRML_SCHEMA_PATH),
                            "NRML instance file %s does not validate against "\
                            "schema" % nrml_path)

        base_path = helpers.testdata_path("classical_psha_simple")
        path = helpers.testdata_path("classical_psha_simple/config.gem")
        job = engine.prepare_job()
        job_profile, params, sections = engine.import_job_profile(path, job)

        the_job = JobContext(params,
                             job.id,
                             sections=sections,
                             base_path=base_path,
                             serialize_results_to=['db', 'xml'],
                             oq_job_profile=job_profile,
                             oq_job=job)
        the_job.to_kvs()

        calc_mode = job_profile.calc_mode
        calculator = CALCULATORS[calc_mode](the_job)

        used_keys = []
        calculator.execute(used_keys)

        verify_realization_haz_curves_stored_to_kvs(the_job, used_keys)
        verify_realization_haz_curves_stored_to_nrml(the_job, calculator)

        # hazard curves: check results of mean and quantile computation
        verify_mean_haz_curves_stored_to_kvs(the_job, used_keys)
        verify_quantile_haz_curves_stored_to_kvs(the_job, calculator,
                                                 used_keys)

        verify_mean_haz_curves_stored_to_nrml(the_job, calculator)
        verify_quantile_haz_curves_stored_to_nrml(the_job, calculator)

        # hazard maps: check results of mean and quantile computation
        verify_mean_haz_maps_stored_to_kvs(the_job, calculator, used_keys)
        verify_quantile_haz_maps_stored_to_kvs(the_job, calculator, used_keys)

        verify_mean_haz_maps_stored_to_nrml(the_job)
        verify_quantile_haz_maps_stored_to_nrml(the_job, calculator)
Example #51
    def test_generate_hazard_curves_using_classical_psha(self):

        def verify_realization_haz_curves_stored_to_kvs(the_job, keys):
            """ This just tests to make sure there something in the KVS
            for each key in given list of keys. This does NOT test the
            actual results. """
            # TODO (LB): At some point we need to test the actual
            # results to verify they are correct

            realizations = int(
                the_job.params['NUMBER_OF_LOGIC_TREE_SAMPLES'])

            for realization in xrange(0, realizations):
                for site in the_job.sites_to_compute():
                    key = tokens.hazard_curve_poes_key(
                        the_job.job_id, realization, site)
                    self.assertTrue(key in keys, "Missing key %s" % key)

        def verify_mean_haz_curves_stored_to_kvs(the_job, keys):
            """ Make sure that the keys and non-empty values for mean
            hazard curves have been written to KVS."""

            if the_job.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == 'true':

                LOG.debug("verifying KVS entries for mean hazard curves")
                for site in the_job.sites_to_compute():
                    key = tokens.mean_hazard_curve_key(the_job.job_id, site)
                    self.assertTrue(key in keys, "Missing key %s" % key)

        def verify_mean_haz_maps_stored_to_kvs(the_job, calculator, keys):
            """ Make sure that the keys and non-empty values for mean
            hazard maps have been written to KVS."""

            if (the_job.params[hazard_general.POES_PARAM_NAME] != '' and
                the_job.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == \
                'true'):

                LOG.debug("verifying KVS entries for mean hazard maps")

                for poe in calculator.poes_hazard_maps:
                    for site in the_job.sites_to_compute():
                        key = tokens.mean_hazard_map_key(
                            the_job.job_id, site, poe)
                        self.assertTrue(key in keys, "Missing key %s" % key)

        def verify_quantile_haz_curves_stored_to_kvs(the_job, calculator,
                                                     keys):
            """ Make sure that the keys and non-empty values for quantile
            hazard curves have been written to KVS."""

            quantiles = calculator.quantile_levels

            LOG.debug("verifying KVS entries for quantile hazard curves, "\
                "%s quantile values" % len(quantiles))

            for quantile in quantiles:
                for site in the_job.sites_to_compute():
                    key = tokens.quantile_hazard_curve_key(
                        the_job.job_id, site, quantile)
                    self.assertTrue(key in keys, "Missing key %s" % key)

        def verify_quantile_haz_maps_stored_to_kvs(the_job, calculator, keys):
            """ Make sure that the keys and non-empty values for quantile
            hazard maps have been written to KVS."""

            quantiles = calculator.quantile_levels

            if (the_job.params[hazard_general.POES_PARAM_NAME] != '' and
                len(quantiles) > 0):

                poes = calculator.poes_hazard_maps

                LOG.debug("verifying KVS entries for quantile hazard maps, "\
                    "%s quantile values, %s PoEs" % (
                    len(quantiles), len(poes)))

                for quantile in quantiles:
                    for poe in poes:
                        for site in the_job.sites_to_compute():
                            key = tokens.quantile_hazard_map_key(
                                the_job.job_id, site, poe, quantile)
                            self.assertTrue(
                                key in keys, "Missing key %s" % key)

        def verify_realization_haz_curves_stored_to_nrml(the_job, calculator):
            """Tests that a NRML file has been written for each realization,
            and that this file validates against the NRML schema.
            Does NOT test if results in NRML file are correct.
            """
            realizations = int(
                the_job.params['NUMBER_OF_LOGIC_TREE_SAMPLES'])
            for realization in xrange(0, realizations):

                nrml_path = os.path.join(
                    "demos/classical_psha_simple/computed_output",
                    calculator.hazard_curve_filename(realization))

                LOG.debug("validating NRML file %s" % nrml_path)

                self.assertTrue(xml.validates_against_xml_schema(
                    nrml_path, NRML_SCHEMA_PATH),
                    "NRML instance file %s does not validate against schema" \
                    % nrml_path)

        def verify_mean_haz_curves_stored_to_nrml(the_job, calculator):
            """Tests that a mean hazard curve NRML file has been written,
            and that this file validates against the NRML schema.
            Does NOT test if results in NRML file are correct.
            """

            if the_job.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == 'true':
                nrml_path = os.path.join(
                    "demos/classical_psha_simple/computed_output",
                    calculator.mean_hazard_curve_filename())

                LOG.debug("validating NRML file %s" % nrml_path)

                self.assertTrue(xml.validates_against_xml_schema(
                    nrml_path, NRML_SCHEMA_PATH),
                    "NRML instance file %s does not validate against schema" \
                    % nrml_path)

        def verify_mean_haz_maps_stored_to_nrml(the_job):
            """Tests that a mean hazard map NRML file has been written,
            and that this file validates against the NRML schema.
            Does NOT test if results in NRML file are correct.

            Note: ``calculator`` is not a parameter here; it is picked up
            as a closure from the enclosing test scope.
            """
            if (the_job.params[hazard_general.POES_PARAM_NAME] != '' and
                the_job.params['COMPUTE_MEAN_HAZARD_CURVE'].lower() == \
                'true'):

                for poe in calculator.poes_hazard_maps:
                    nrml_path = os.path.join(
                        "demos/classical_psha_simple/computed_output",
                        calculator.mean_hazard_map_filename(poe))

                    LOG.debug("validating NRML file for mean hazard map %s" \
                        % nrml_path)

                    self.assertTrue(xml.validates_against_xml_schema(
                        nrml_path, NRML_SCHEMA_PATH),
                        "NRML instance file %s does not validate against "\
                        "schema" % nrml_path)

        def verify_quantile_haz_curves_stored_to_nrml(the_job, calculator):
            """Tests that quantile hazard curve NRML files have been written,
            and that these files validate against the NRML schema.
            Does NOT test if results in NRML files are correct.
            """

            for quantile in calculator.quantile_levels:

                nrml_path = os.path.join(
                    "demos/classical_psha_simple/computed_output",
                    calculator.quantile_hazard_curve_filename(quantile))

                LOG.debug("validating NRML file for quantile hazard curve: "\
                    "%s" % nrml_path)

                self.assertTrue(xml.validates_against_xml_schema(
                    nrml_path, NRML_SCHEMA_PATH),
                    "NRML instance file %s does not validate against schema" \
                    % nrml_path)

        def verify_quantile_haz_maps_stored_to_nrml(the_job, calculator):
            """Tests that quantile hazard map NRML files have been written,
            and that these files validate against the NRML schema.
            Does NOT test if results in NRML files are correct.
            """

            quantiles = calculator.quantile_levels

            if (the_job.params[hazard_general.POES_PARAM_NAME] != '' and
                len(quantiles) > 0):

                for poe in calculator.poes_hazard_maps:
                    for quantile in quantiles:
                        nrml_path = os.path.join(
                            "demos/classical_psha_simple/computed_output",
                            calculator.quantile_hazard_map_filename(
                                quantile, poe))

                        LOG.debug("validating NRML file for quantile hazard "\
                            "map: %s" % nrml_path)

                        self.assertTrue(xml.validates_against_xml_schema(
                            nrml_path, NRML_SCHEMA_PATH),
                            "NRML instance file %s does not validate against "\
                            "schema" % nrml_path)

        base_path = helpers.testdata_path("classical_psha_simple")
        path = helpers.testdata_path("classical_psha_simple/config.gem")
        job = engine.prepare_job()
        job_profile, params, sections = engine.import_job_profile(path, job)

        the_job = JobContext(
            params, job.id, sections=sections, base_path=base_path,
            serialize_results_to=['db', 'xml'], oq_job_profile=job_profile,
            oq_job=job)
        the_job.to_kvs()

        calc_mode = job_profile.calc_mode
        calculator = CALCULATORS[calc_mode](the_job)

        used_keys = []
        calculator.execute(used_keys)

        verify_realization_haz_curves_stored_to_kvs(the_job, used_keys)
        verify_realization_haz_curves_stored_to_nrml(the_job, calculator)

        # hazard curves: check results of mean and quantile computation
        verify_mean_haz_curves_stored_to_kvs(the_job, used_keys)
        verify_quantile_haz_curves_stored_to_kvs(the_job, calculator,
                                                 used_keys)

        verify_mean_haz_curves_stored_to_nrml(the_job, calculator)
        verify_quantile_haz_curves_stored_to_nrml(the_job, calculator)

        # hazard maps: check results of mean and quantile computation
        verify_mean_haz_maps_stored_to_kvs(the_job, calculator, used_keys)
        verify_quantile_haz_maps_stored_to_kvs(the_job, calculator, used_keys)

        verify_mean_haz_maps_stored_to_nrml(the_job)
        verify_quantile_haz_maps_stored_to_nrml(the_job, calculator)
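
Stripped of the assertions, examples #50 and #51 exercise a single end-to-end
flow: create a job record, import its profile, wrap both in a JobContext, push
the context to the KVS, then run the calculator matching the profile's
calc_mode. A minimal sketch of that flow, assuming CALCULATORS lives in
openquake.calculators.hazard (the snippets use it unqualified) and with the
config and base paths as placeholders:

from openquake import engine
from openquake.engine import JobContext
from openquake.calculators.hazard import CALCULATORS  # assumed import path

def run_hazard_job(config_path, base_path):
    """Prepare, import and execute a hazard job; return context and keys."""
    job = engine.prepare_job()
    job_profile, params, sections = engine.import_job_profile(config_path, job)

    job_ctxt = JobContext(
        params, job.id, sections=sections, base_path=base_path,
        serialize_results_to=['db', 'xml'],
        oq_job_profile=job_profile, oq_job=job)
    job_ctxt.to_kvs()

    calculator = CALCULATORS[job_profile.calc_mode](job_ctxt)
    used_keys = []  # execute() appends every KVS key it writes
    calculator.execute(used_keys)
    return job_ctxt, calculator, used_keys

The used_keys list is what the verify_*_stored_to_kvs helpers check against:
each expected tokens.*_key must be among the keys the calculator wrote.
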
Example #52
    @classmethod
    def setUpClass(cls):
        cls.job = engine.prepare_job()
Example #53
    def setUp(self):
        self.job = engine.prepare_job()
Example #54
    @classmethod
    def setUpClass(cls):
        cls.job = engine.prepare_job()
Example #55
    def setUp(self):
        self.job = engine.prepare_job()
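
Examples #52-#55 are the minimal fixture forms of the same setup: one job
record shared per class via setUpClass (which must be declared with
@classmethod), or a fresh record per test via setUp. A sketch of both in
context, assuming a plain unittest.TestCase base:

import unittest

from openquake import engine

class SharedJobTestCase(unittest.TestCase):
    """One job record shared by every test in the class."""

    @classmethod
    def setUpClass(cls):
        cls.job = engine.prepare_job()

class FreshJobTestCase(unittest.TestCase):
    """A fresh job record created for each individual test."""

    def setUp(self):
        self.job = engine.prepare_job()
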