Example #1
 def test__db_cnode_status_and_two_jobs(self):
     job1 = engine.prepare_job()
     for node, status in [("P1", "up"), ("P2", "down"), ("P3", "down")]:
         ns = models.CNodeStats(oq_job=job1, node=node,
                                current_status=status)
         ns.save(using="job_init")
     job2 = engine.prepare_job()
     expected = {}
     for node, status in [("Q2", "down"), ("Q3", "down")]:
         ns = models.CNodeStats(oq_job=job2, node=node,
                                current_status=status)
         ns.save(using="job_init")
         expected[node] = ns
     self.assertEqual(expected, monitor._db_cnode_status(job2.id))
Example #2
    def test_validate_warns(self):
        # Test that `validate` raises warnings if unnecessary parameters are
        # specified for a given calculation.
        # For example, `ses_per_logic_tree_path` is an event-based hazard
        # param; if this param is specified for a classical hazard job, a
        # warning should be raised.
        cfg_file = helpers.get_data_path('simple_fault_demo_hazard/job.ini')
        job = engine.prepare_job()
        params = engine.parse_config(open(cfg_file, 'r'))
        # Add a few superfluous parameters:
        params['ses_per_logic_tree_path'] = 5
        params['ground_motion_correlation_model'] = 'JB2009'
        calculation = engine.create_calculation(
            models.HazardCalculation, params)
        job.hazard_calculation = calculation
        job.save()

        with warnings.catch_warnings(record=True) as w:
            validation.validate(job, 'hazard', params, ['xml'])

        expected_warnings = [
            "Unknown parameter '%s' for calculation mode 'classical'."
            " Ignoring." % x for x in ('ses_per_logic_tree_path',
                                       'ground_motion_correlation_model')
        ]

        actual_warnings = [m.message.message for m in w]
        self.assertEqual(sorted(expected_warnings), sorted(actual_warnings))
Example #3
    def setUp(self):
        self.job = engine.prepare_job()
        self.calc = disagg_core.DisaggHazardCalculator(self.job)

        # Mock `disagg_task_arg_gen`
        disagg_path = 'openquake.engine.calculators.hazard.disaggregation'
        self.disagg_tag_patch = helpers.patch(
            '%s.core.DisaggHazardCalculator.disagg_task_arg_gen'
            % disagg_path)
        self.disagg_tag_mock = self.disagg_tag_patch.start()
        # fake disagg task arg generator:
        disagg_tag = iter(xrange(3))
        self.disagg_tag_mock.return_value = disagg_tag

        # Mock `haz_general.queue_next`
        base_path = 'openquake.engine.calculators.base'
        self.queue_next_patch = helpers.patch('%s.queue_next' % base_path)
        self.queue_next_mock = self.queue_next_patch.start()

        # Mock `finalize_hazard_curves`
        general_path = 'openquake.engine.calculators.hazard.general'
        self.finalize_curves_patch = helpers.patch(
            '%s.BaseHazardCalculator.finalize_hazard_curves'
            % general_path)
        self.finalize_curves_mock = self.finalize_curves_patch.start()
Example #5
 def test__db_cnode_status_and_two_jobs(self):
     job1 = engine.prepare_job()
     for node, status in [("P1", "up"), ("P2", "down"), ("P3", "down")]:
         ns = models.CNodeStats(oq_job=job1,
                                node=node,
                                current_status=status)
         ns.save(using="job_superv")
     job2 = engine.prepare_job()
     expected = {}
     for node, status in [("Q2", "down"), ("Q3", "down")]:
         ns = models.CNodeStats(oq_job=job2,
                                node=node,
                                current_status=status)
         ns.save(using="job_superv")
         expected[node] = ns
     self.assertEqual(expected, monitor._db_cnode_status(job2.id))
Example #6
    def test_pre_execute_check_imts_raises(self):
        haz_job = engine.prepare_job()
        cfg = helpers.get_data_path('classical_job.ini')
        params, files = engine.parse_config(open(cfg, 'r'))
        haz_job.hazard_calculation = engine.create_hazard_calculation(
            haz_job.owner, params, files.values())
        haz_job.save()

        hazard_curve_output = models.Output.objects.create_output(
            haz_job, 'test_hazard_curve', 'hazard_curve'
        )
        models.HazardCurve.objects.create(
            output=hazard_curve_output,
            investigation_time=50.0,
            imt='PGV',  # the vulnerability model only defines SA(0.1)
            statistics='mean'
        )

        cfg = helpers.get_data_path(
            'end-to-end-hazard-risk/job_risk_classical.ini')
        risk_job = helpers.get_risk_job(
            cfg, hazard_output_id=hazard_curve_output.id
        )
        models.JobStats.objects.create(oq_job=risk_job)
        calc = classical.ClassicalRiskCalculator(risk_job)

        # Check for compatibility between the IMTs defined in the vulnerability
        # model and the chosen hazard output (--hazard-output-id)
        with self.assertRaises(ValueError) as ar:
            calc.pre_execute()
        self.assertEqual(
            "There is no hazard output for: SA(0.1). "
            "The available IMTs are: PGA.",
            ar.exception.message)
Example #7
def get_job(cfg, username="******", hazard_calculation_id=None,
            hazard_output_id=None, **extras):
    """
    Given a path to a config file and a hazard_calculation_id
    (or, alternatively, a hazard_output_id), create a
    :class:`openquake.engine.db.models.OqJob` object for a risk calculation.
    """
    if hazard_calculation_id is None and hazard_output_id is None:
        return engine.job_from_file(cfg, username, 'error', [], **extras)

    job = engine.prepare_job(username)
    oqparam = readini.parse_config(
        open(cfg), hazard_calculation_id, hazard_output_id)
    params = vars(oqparam)
    if hazard_calculation_id is None:
        params['hazard_calculation_id'] = models.Output.objects.get(
            pk=hazard_output_id).oq_job.id

    # we are removing intensity_measure_types_and_levels because it is not
    # a field of RiskCalculation; this ugliness will disappear when
    # RiskCalculation is removed
    del params['intensity_measure_types_and_levels']
    job.save_params(params)
    risk_calc = engine.create_calculation(models.RiskCalculation, params)
    risk_calc = models.RiskCalculation.objects.get(id=risk_calc.id)
    job.risk_calculation = risk_calc
    job.save()
    return job
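
A minimal usage sketch for the helper above; the config path, username and hazard output id below are placeholders, not values from the source:

# Hypothetical call: build a risk OqJob on top of an existing hazard output.
job = get_job('job_risk.ini', username='tester', hazard_output_id=123)
print job.id, job.risk_calculation.id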
Example #8
    def test_prepare_job_explicit_log_level(self):
        # By default, a job is created with a log level of 'progress'
        # (just to show calculation progress).
        # In this test, we'll specify 'debug' as the log level.
        job = engine.prepare_job(log_level='debug')

        self.assertEqual('debug', job.log_level)
Example #9
    def test(self):
        # check that, if risk models are provided, the ``points to compute``
        # and the IMLs are taken from them

        username = helpers.default_user()

        job = engine.prepare_job(username)

        cfg = helpers.get_data_path("classical_job-sd-imt.ini")
        params = vars(readini.parse_config(open(cfg)))
        del params["hazard_calculation_id"]
        del params["hazard_output_id"]
        haz_calc = engine.create_calculation(models.HazardCalculation, params)
        haz_calc = models.HazardCalculation.objects.get(id=haz_calc.id)
        job.hazard_calculation = haz_calc
        job.is_running = True
        job.save()

        calc = get_calculator_class("hazard", job.hazard_calculation.calculation_mode)(job)
        calc.parse_risk_models()

        self.assertEqual(
            [(1.0, -1.0), (0.0, 0.0)], [(point.latitude, point.longitude) for point in haz_calc.points_to_compute()]
        )
        self.assertEqual(["PGA"], haz_calc.get_imts())

        self.assertEqual(3, haz_calc.oqjob.exposuremodel.exposuredata_set.count())

        return job
Example #10
    def setup_classic_job(cls, create_job_path=True, inputs=None,
                          force_inputs=False, omit_profile=False,
                          user_name="openquake"):
        """Create a classic job with associated upload and inputs.

        :param bool create_job_path: if set, the path for the job will be
            created and captured in the job record
        :param list inputs: a list of 2-tuples where the first and the second
            element are the input type and path respectively
        :param bool force_inputs: If `True` the model input files will be
            parsed and the resulting content written to the database no matter
            what.
        :param bool omit_profile: If `True` no job profile will be created.
        :param str user_name: The name of the user that is running the job.
        :returns: a :py:class:`db.models.OqJob` instance
        """
        job = engine.prepare_job(user_name)
        if not omit_profile:
            oqjp = cls.setup_job_profile(job, force_inputs)
            models.Job2profile(oq_job=job, oq_job_profile=oqjp).save()

        # Insert input model files
        if inputs:
            insert_inputs(job, inputs)

        if create_job_path:
            job.path = os.path.join(tempfile.mkdtemp(), str(job.id))
            job.save()

            os.mkdir(job.path)
            os.chmod(job.path, 0777)

        return job
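
A minimal usage sketch, assuming the classmethod above lives on a test helper class; the class name `JobTestCase` and the input tuple are placeholders:

# Hypothetical call: create a classic job with one declared input file.
job = JobTestCase.setup_classic_job(
    inputs=[('exposure', '/tmp/exposure.xml')], user_name='tester')
print job.id, job.path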
Example #11
def import_gmf_scenario(fileobj):
    """
    Parse the file with the GMF fields and import it into the table
    gmf_scenario. It also creates a new output record, unrelated to a job.
    Works both with XML files and tab-separated files with format
    (imt, gmvs, location).
    :returns: the generated :class:`openquake.engine.db.models.Output` object
    """
    t0 = time.time()
    fname = fileobj.name

    job = engine.prepare_job()

    ses_coll, gmf_coll = create_ses_gmf(job, fname)
    imts, tags, rows = read_data(fileobj)
    import_rows(job, ses_coll, gmf_coll, tags, rows)
    job.save_params(
        dict(
            base_path=os.path.dirname(fname),
            description='Scenario importer, file %s' % os.path.basename(fname),
            calculation_mode='scenario',
            intensity_measure_types_and_levels=dict.fromkeys(imts),
            inputs={},
            number_of_ground_motion_fields=len(rows) // len(imts)
            ))

    job.duration = time.time() - t0
    job.status = 'complete'
    job.save()
    return gmf_coll.output
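
A minimal usage sketch; the file name is a placeholder and the input may be either XML or tab-separated, per the docstring:

# Hypothetical call: import a scenario GMF file and inspect the new output.
with open('gmf-scenario.xml') as fileobj:
    output = import_gmf_scenario(fileobj)
print output.id, output.oq_job.status  # the importer marks the job 'complete'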
Example #13
def get_risk_job(cfg, username=None, hazard_calculation_id=None,
                 hazard_output_id=None):
    """
    Given a path to a config file and a hazard_calculation_id
    (or, alternatively, a hazard_output_id), create a
    :class:`openquake.engine.db.models.OqJob` object for a risk calculation.
    """
    username = username if username is not None else default_user().user_name

    # You can't specify both a hazard output and hazard calculation
    # Pick one
    assert not (hazard_calculation_id is not None
                and hazard_output_id is not None)

    job = engine.prepare_job(username)
    params, files = engine.parse_config(open(cfg, 'r'))

    params.update(
        dict(hazard_output_id=hazard_output_id,
             hazard_calculation_id=hazard_calculation_id)
    )

    risk_calc = engine.create_risk_calculation(
        job.owner, params, files)
    risk_calc = models.RiskCalculation.objects.get(id=risk_calc.id)
    job.risk_calculation = risk_calc
    job.save()
    return job
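
A minimal usage sketch; the config path and hazard output id are placeholders (only one of the two hazard arguments may be passed, as the assert enforces):

# Hypothetical call: build a risk job driven by one existing hazard output.
job = get_risk_job('job_risk.ini', hazard_output_id=42)
print job.id, job.risk_calculation.id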
Example #14
    def test_validate_warns(self):
        # Test that `validate` raises warnings if unnecessary parameters are
        # specified for a given calculation.
        # For example, `ses_per_logic_tree_path` is an event-based hazard
        # param; if this param is specified for a classical hazard job, a
        # warning should be raised.
        cfg_file = helpers.get_data_path('simple_fault_demo_hazard/job.ini')
        job = engine.prepare_job()
        params = engine.parse_config(open(cfg_file, 'r'))
        # Add a few superfluous parameters:
        params['ses_per_logic_tree_path'] = 5
        params['ground_motion_correlation_model'] = 'JB2009'
        calculation = engine.create_calculation(models.HazardCalculation,
                                                params)
        job.hazard_calculation = calculation
        job.save()

        with warnings.catch_warnings(record=True) as w:
            validation.validate(job, 'hazard', params, ['xml'])

        expected_warnings = [
            "Unknown parameter '%s' for calculation mode 'classical'."
            " Ignoring." % x for x in ('ses_per_logic_tree_path',
                                       'ground_motion_correlation_model')
        ]

        actual_warnings = [m.message.message for m in w]
        self.assertEqual(sorted(expected_warnings), sorted(actual_warnings))
Example #15
    def test_pre_execute_check_imts_no_errors(self):
        haz_job = engine.prepare_job()

        cfg = helpers.get_data_path(
            'end-to-end-hazard-risk/job_haz_classical.ini')
        params, files = engine.parse_config(open(cfg, 'r'))
        haz_job.hazard_calculation = engine.create_hazard_calculation(
            haz_job.owner, params, files.values())
        haz_job.save()

        hazard_curve_output = models.Output.objects.create_output(
            haz_job, 'test_hazard_curve', 'hazard_curve'
        )
        models.HazardCurve.objects.create(
            output=hazard_curve_output,
            investigation_time=50.0,
            # this imt is compatible with the vuln model
            imt='SA',
            sa_period=0.025,
            sa_damping=5.0,
            statistics='mean'
        )

        cfg = helpers.get_data_path(
            'end-to-end-hazard-risk/job_risk_classical.ini')
        risk_job = helpers.get_risk_job(
            cfg, hazard_output_id=hazard_curve_output.id
        )
        models.JobStats.objects.create(oq_job=risk_job)
        calc = classical.ClassicalRiskCalculator(risk_job)

        # In contrast to the test above (`test_pre_execute_check_imts_raises`),
        # we expect no errors to be raised.
        calc.pre_execute()
Example #16
 def testEnginePerformanceMonitorNoTask(self):
     job = engine.prepare_job()
     operation = uuid.uuid1()
     with EnginePerformanceMonitor(operation, job.id) as pmon:
         pass
     self.check_result(pmon, nproc=2)
     records = Performance.objects.filter(operation=operation)
     self.assertEqual(len(records), 1)
Example #17
 def test__db_cnode_status_and_wrong_job_id(self):
     job = engine.prepare_job()
     expected = {}
     for node, status in [("O1", "up"), ("O2", "down"), ("O3", "down")]:
         ns = models.CNodeStats(oq_job=job, node=node,
                                current_status=status)
         ns.save(using="job_init")
     self.assertEqual(expected, monitor._db_cnode_status(-1))
Example #18
 def test__db_cnode_status_and_wrong_job_id(self):
     job = engine.prepare_job()
     expected = {}
     for node, status in [("O1", "up"), ("O2", "down"), ("O3", "down")]:
         ns = models.CNodeStats(oq_job=job,
                                node=node,
                                current_status=status)
         ns.save(using="job_superv")
     self.assertEqual(expected, monitor._db_cnode_status(-1))
Example #19
 def test__db_cnode_status(self):
     job = engine.prepare_job()
     expected = {}
     for node, status in [("N1", "up"), ("N2", "down"), ("N3", "down")]:
         ns = models.CNodeStats(oq_job=job, node=node,
                                current_status=status)
         ns.save(using="job_init")
         expected[node] = ns
     self.assertEqual(expected, monitor._db_cnode_status(job.id))
Example #20
 def test__db_cnode_status(self):
     job = engine.prepare_job()
     expected = {}
     for node, status in [("N1", "up"), ("N2", "down"), ("N3", "down")]:
         ns = models.CNodeStats(oq_job=job,
                                node=node,
                                current_status=status)
         ns.save(using="job_superv")
         expected[node] = ns
     self.assertEqual(expected, monitor._db_cnode_status(job.id))
Example #21
 def test_engine_performance_monitor_no_task(self):
     job = engine.prepare_job()
     operation = str(uuid.uuid1())
     with EnginePerformanceMonitor(
             operation, job.id, profile_pgmem=True) as pmon:
         pass
     self._check_result(pmon, nproc=2)
     flush()
     records = Performance.objects.filter(operation=operation)
     self.assertEqual(len(records), 1)
Example #22
 def test_engine_performance_monitor_no_task(self):
     job = engine.prepare_job()
     operation = str(uuid.uuid1())
     with EnginePerformanceMonitor(operation, job.id,
                                   profile_pgmem=True) as pmon:
         pass
     self._check_result(pmon)
     flush()
     records = Performance.objects.filter(operation=operation)
     self.assertEqual(len(records), 1)
Example #23
 def testEnginePerformanceMonitor(self):
     job = engine.prepare_job()
     mock_task = mock.Mock()
     mock_task.__name__ = 'mock_task'
     mock_task.request.id = task_id = uuid.uuid1()
     with EnginePerformanceMonitor('test', job.id, mock_task) as pmon:
         pass
     self.check_result(pmon, nproc=2)
     # check that one record was stored on the db, as it should
     self.assertEqual(len(Performance.objects.filter(task_id=task_id)), 1)
Example #24
    def test_read_and_validate_hazard_config(self):
        cfg = helpers.get_data_path('simple_fault_demo_hazard/job.ini')
        job = engine.prepare_job(getpass.getuser())
        params, files = engine.parse_config(open(cfg, 'r'))
        calculation = engine.create_hazard_calculation(
            job.owner, params, files.values())

        form = validation.ClassicalHazardForm(
            instance=calculation, files=files
        )
        self.assertTrue(form.is_valid())
Example #25
    def test_prepare_job_default_user(self):
        job = engine.prepare_job()

        self.assertEqual('openquake', job.user_name)
        self.assertEqual('pre_executing', job.status)
        self.assertEqual('progress', job.log_level)

        # Check to make sure it's in the database.
        try:
            models.OqJob.objects.get(id=job.id)
        except exceptions.ObjectDoesNotExist:
            self.fail('Job was not found in the database')
Example #27
    def test_prepare_job_specified_user(self):
        user_name = helpers.random_string()
        job = engine.prepare_job(user_name=user_name)

        self.assertEqual(user_name, job.user_name)
        self.assertEqual('pre_executing', job.status)
        self.assertEqual('progress', job.log_level)

        try:
            models.OqJob.objects.get(id=job.id)
        except exceptions.ObjectDoesNotExist:
            self.fail('Job was not found in the database')
Example #28
 def test_engine_performance_monitor(self):
     job = engine.prepare_job()
     mock_task = mock.Mock()
     mock_task.__name__ = 'mock_task'
     mock_task.request.id = task_id = str(uuid.uuid1())
     with EnginePerformanceMonitor(
             'test', job.id, mock_task, profile_pgmem=True) as pmon:
         pass
     self._check_result(pmon)
     # check that one record was stored on the db, as it should
     flush()
     self.assertEqual(len(Performance.objects.filter(task_id=task_id)), 1)
Example #30
def import_gmf_scenario(fileobj):
    """
    Parse the file with the GMF fields and import it into the table
    gmf_scenario. It also creates a new output record, unrelated to a job.
    Works both with XML files and tab-separated files with format
    (imt, gmvs, location).
    :returns: the generated :class:`openquake.engine.db.models.Output` object
    """
    t0 = time.time()
    fname = fileobj.name

    job = engine.prepare_job()
    hc = models.HazardCalculation.objects.create(
        base_path=os.path.dirname(fname),
        description='Scenario importer, file %s' % os.path.basename(fname),
        calculation_mode='scenario',
        maximum_distance=100,
        intensity_measure_types_and_levels={},
        inputs={},
    )
    # XXX: probably the maximum_distance should be entered by the user

    out = models.Output.objects.create(
        oq_job=job,
        display_name='Imported from %r' % fname,
        output_type='gmf_scenario')

    gmf_coll = models.Gmf.objects.create(output=out)

    rows = []
    if fname.endswith('.xml'):
        # convert the XML into a tab-separated StringIO
        for imt, gmvs, loc in GMFScenarioParser(fileobj).parse():
            hc.intensity_measure_types_and_levels[imt] = []
            imt_type, sa_period, sa_damping = from_string(imt)
            sa_period = '\N' if sa_period is None else str(sa_period)
            sa_damping = '\N' if sa_damping is None else str(sa_damping)
            gmvs = '{%s}' % str(gmvs)[1:-1]
            rows.append([imt_type, sa_period, sa_damping, gmvs, loc])
    else:  # assume a tab-separated file
        for line in fileobj:
            rows.append(line.split('\t'))
    import_rows(hc, gmf_coll, rows)
    hc.number_of_ground_motion_fields = len(rows)
    hc.save()  # update intensity_measure_types_and_levels
    job.hazard_calculation = hc
    job.duration = time.time() - t0
    job.status = 'complete'
    job.save()
    return out
Example #31
    def test(self):
        # check that, if risk models are provided, the ``points to compute``
        # and the IMLs are taken from them

        username = helpers.default_user().user_name

        job = engine.prepare_job(username)

        cfg = helpers.get_data_path('classical_job-sd-imt.ini')
        params, files = engine.parse_config(open(cfg, 'r'))

        haz_calc = engine.create_hazard_calculation(
            job.owner.user_name, params, files)
        haz_calc = models.HazardCalculation.objects.get(id=haz_calc.id)
        job.hazard_calculation = haz_calc
        job.is_running = True
        job.save()

        base_path = ('openquake.engine.calculators.hazard.classical.core'
                     '.ClassicalHazardCalculator')
        init_src_patch = helpers.patch(
            '%s.%s' % (base_path, 'initialize_sources'))
        init_sm_patch = helpers.patch(
            '%s.%s' % (base_path, 'initialize_site_model'))
        init_rlz_patch = helpers.patch(
            '%s.%s' % (base_path, 'initialize_realizations'))
        record_stats_patch = helpers.patch(
            '%s.%s' % (base_path, 'record_init_stats'))
        init_pr_data_patch = helpers.patch(
            '%s.%s' % (base_path, 'initialize_pr_data'))
        patches = (init_src_patch, init_sm_patch, init_rlz_patch,
                   record_stats_patch, init_pr_data_patch)

        mocks = [p.start() for p in patches]

        get_calculator_class(
            'hazard',
            job.hazard_calculation.calculation_mode)(job).pre_execute()

        self.assertEqual([(1.0, -1.0), (0.0, 0.0)],
                         [(point.latitude, point.longitude)
                          for point in haz_calc.points_to_compute()])
        self.assertEqual(['PGA'], haz_calc.get_imts())

        self.assertEqual(3, haz_calc.exposure_model.exposuredata_set.count())

        for i, m in enumerate(mocks):
            m.stop()
            patches[i].stop()

        return job
Example #32
    def test(self):
        # check that, if risk models are provided, the ``points to compute``
        # and the IMLs are taken from them

        username = helpers.default_user()

        job = engine.prepare_job(username)

        cfg = helpers.get_data_path('classical_job-sd-imt.ini')
        params = engine.parse_config(open(cfg, 'r'))

        haz_calc = engine.create_calculation(models.HazardCalculation, params)
        haz_calc = models.HazardCalculation.objects.get(id=haz_calc.id)
        job.hazard_calculation = haz_calc
        job.is_running = True
        job.save()

        base_path = ('openquake.engine.calculators.hazard.classical.core'
                     '.ClassicalHazardCalculator')
        init_src_patch = helpers.patch('%s.%s' %
                                       (base_path, 'initialize_sources'))
        init_sm_patch = helpers.patch('%s.%s' %
                                      (base_path, 'initialize_site_model'))
        init_rlz_patch = helpers.patch('%s.%s' %
                                       (base_path, 'initialize_realizations'))
        record_stats_patch = helpers.patch('%s.%s' %
                                           (base_path, 'record_init_stats'))
        init_pr_data_patch = helpers.patch('%s.%s' %
                                           (base_path, 'initialize_pr_data'))
        patches = (init_src_patch, init_sm_patch, init_rlz_patch,
                   record_stats_patch, init_pr_data_patch)

        mocks = [p.start() for p in patches]

        get_calculator_class(
            'hazard',
            job.hazard_calculation.calculation_mode)(job).pre_execute()

        self.assertEqual([(1.0, -1.0), (0.0, 0.0)],
                         [(point.latitude, point.longitude)
                          for point in haz_calc.points_to_compute()])
        self.assertEqual(['PGA'], haz_calc.get_imts())

        self.assertEqual(3,
                         haz_calc.oqjob.exposuremodel.exposuredata_set.count())

        for i, m in enumerate(mocks):
            m.stop()
            patches[i].stop()

        return job
Example #33
 def test_engine_performance_monitor(self):
     job = engine.prepare_job()
     mock_task = mock.Mock()
     mock_task.__name__ = 'mock_task'
     mock_task.request.id = task_id = str(uuid.uuid1())
     with EnginePerformanceMonitor('test',
                                   job.id,
                                   mock_task,
                                   profile_pgmem=True) as pmon:
         pass
     self._check_result(pmon)
     # check that one record was stored on the db, as it should
     flush()
     self.assertEqual(len(Performance.objects.filter(task_id=task_id)), 1)
Example #34
def get_hazard_job(cfg, username=None):
    """
    Given a path to a config file, create a
    :class:`openquake.engine.db.models.OqJob` object for a hazard calculation.
    """
    username = username if username is not None else default_user().user_name

    job = engine.prepare_job(username)
    params, files = engine.parse_config(open(cfg, 'r'))
    haz_calc = engine.create_hazard_calculation(
        job.owner, params, files.values())
    haz_calc = models.HazardCalculation.objects.get(id=haz_calc.id)
    job.hazard_calculation = haz_calc
    job.save()
    return job
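
A minimal usage sketch; the config path is a placeholder and the username falls back to the default test user:

# Hypothetical call: build a hazard OqJob from a hazard config file.
job = get_hazard_job('job_hazard.ini')
print job.id, job.hazard_calculation.id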
Example #35
def get_job(cfg, username="******", hazard_calculation_id=None,
            hazard_output_id=None):
    """
    Given a path to a config file and a hazard_calculation_id
    (or, alternatively, a hazard_output_id), create a
    :class:`openquake.engine.db.models.OqJob` object for a risk calculation.
    """
    if hazard_calculation_id is None and hazard_output_id is None:
        return engine.job_from_file(cfg, username, 'error', [])

    job = engine.prepare_job(username)
    params = vars(readini.parse_config(
            open(cfg), hazard_calculation_id, hazard_output_id))
    risk_calc = engine.create_calculation(models.RiskCalculation, params)
    risk_calc = models.RiskCalculation.objects.get(id=risk_calc.id)
    job.risk_calculation = risk_calc
    job.save()
    return job
Example #36
 def setUpClass(cls):
     cls.job = engine.prepare_job()
Example #38
 def setUp(self):
     self.job = engine.prepare_job()
Example #39
def import_hazard_curves(fileobj):
    """
    Parse the file with the hazard curves and import it into the tables
    hazard_curve and hazard_curve_data. It also creates a new output record,
    unrelated to a job.

    :param fileobj:
        a file-like object associated to an XML file
    :returns:
        the generated :class:`openquake.engine.db.models.Output` object
    """
    fname = fileobj.name
    curs = connections['job_init'].cursor().cursor.cursor  # DB API cursor
    job = engine.prepare_job()
    hc = models.HazardCalculation.objects.create(
        base_path=os.path.dirname(fname),
        description='HazardCurve importer, file %s' % os.path.basename(fname),
        calculation_mode='classical', maximum_distance=100)
    # XXX: what about the maximum_distance?

    out = models.Output.objects.create(
        display_name='Imported from %r' % fname, output_type='hazard_curve',
        oq_job=job)

    f = StringIO()
    # convert the XML into a tab-separated StringIO
    hazcurve = HazardCurveXMLParser(fileobj).parse()
    haz_curve = models.HazardCurve.objects.create(
        investigation_time=hazcurve.investigation_time,
        imt=hazcurve.imt,
        imls=hazcurve.imls,
        quantile=hazcurve.quantile_value,
        statistics=hazcurve.statistics,
        sa_damping=hazcurve.sa_damping,
        sa_period=hazcurve.sa_period,
        output=out)
    hazard_curve_id = str(haz_curve.id)
    for node in hazcurve:
        loc = node.location
        poes = node.poes
        poes = '{%s}' % str(poes)[1:-1]
        print >> f, '\t'.join([hazard_curve_id, poes,
                               'SRID=4326;POINT(%s %s)' % (loc.x, loc.y)])
    f.reset()
    ## import the file-like object with a COPY FROM
    try:
        curs.copy_expert(
            'copy hzrdr.hazard_curve_data (hazard_curve_id, poes, location) '
            'from stdin', f)
    except:
        curs.connection.rollback()
        raise
    else:
        curs.connection.commit()
    finally:
        f.close()
    job.hazard_calculation = hc
    job.save()
    return out
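
A minimal usage sketch; the file name is a placeholder for a hazard-curve XML file:

# Hypothetical call: import hazard curves and inspect the new output record.
with open('hazard-curves.xml') as fileobj:
    output = import_hazard_curves(fileobj)
print output.id, output.output_type  # 'hazard_curve'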
Example #40
 def setUp(self):
     self.job = engine.prepare_job()
     self.job.hazard_calculation = HazardCalculation(no_progress_timeout=99)
Example #41
def get_fake_risk_job(risk_cfg, hazard_cfg, output_type="curve",
                      username="******"):
    """
    Takes as input the paths to a risk job config file and a hazard job config
    file.

    Creates fake hazard outputs suitable to be used by a risk
    calculation and then creates a :class:`openquake.engine.db.models.OqJob`
    object for a risk calculation. It also returns the input files
    referenced by the risk config file.

    :param output_type: gmf, gmf_scenario, or curve
    """

    hazard_job = get_job(hazard_cfg, username)
    hc = hazard_job.hazard_calculation

    lt_model = models.LtSourceModel.objects.create(
        hazard_calculation=hazard_job.hazard_calculation,
        ordinal=1, sm_lt_path="test_sm")

    rlz = models.LtRealization.objects.create(
        lt_model=lt_model, ordinal=1, weight=1,
        gsim_lt_path="test_gsim")

    if output_type == "curve":
        models.HazardCurve.objects.create(
            lt_realization=rlz,
            output=models.Output.objects.create_output(
                hazard_job, "Test Hazard output", "hazard_curve_multi"),
            investigation_time=hc.investigation_time)

        hazard_output = models.HazardCurve.objects.create(
            lt_realization=rlz,
            output=models.Output.objects.create_output(
                hazard_job, "Test Hazard output", "hazard_curve"),
            investigation_time=hc.investigation_time,
            imt="PGA", imls=[0.1, 0.2, 0.3])

        for point in ["POINT(-1.01 1.01)", "POINT(0.9 1.01)",
                      "POINT(0.01 0.01)", "POINT(0.9 0.9)"]:
            models.HazardSite.objects.create(
                hazard_calculation=hc, location=point)
            models.HazardCurveData.objects.create(
                hazard_curve=hazard_output,
                poes=[0.1, 0.2, 0.3],
                location="%s" % point)

    elif output_type == "gmf_scenario":
        hazard_output = models.Gmf.objects.create(
            output=models.Output.objects.create_output(
                hazard_job, "Test gmf scenario output", "gmf_scenario"))

        models.SESCollection.objects.create(
            output=models.Output.objects.create_output(
                hazard_job, "Test SES Collection", "ses"),
            lt_model=None, ordinal=0)
        site_ids = hazard_job.hazard_calculation.save_sites(
            [(15.48, 38.0900001), (15.565, 38.17), (15.481, 38.25)])
        for site_id in site_ids:
            models.GmfData.objects.create(
                gmf=hazard_output,
                task_no=0,
                imt="PGA",
                site_id=site_id,
                gmvs=[0.1, 0.2, 0.3],
                rupture_ids=[0, 1, 2])

    elif output_type in ("ses", "gmf"):
        hazard_output = create_gmf_data_records(hazard_job, rlz)[0].gmf

    else:
        raise RuntimeError('Unexpected output_type: %s' % output_type)

    hazard_job.status = "complete"
    hazard_job.save()
    job = engine.prepare_job(username)
    params = vars(readini.parse_config(open(risk_cfg),
                                       hazard_output_id=hazard_output.output.id))

    risk_calc = engine.create_calculation(models.RiskCalculation, params)
    job.risk_calculation = risk_calc
    job.save()

    # reload risk calculation to have all the types converted properly
    job.risk_calculation = models.RiskCalculation.objects.get(id=risk_calc.id)

    return job, set(params['inputs'])
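
A minimal usage sketch; both config paths are placeholders and `output_type` must be one of the values handled above ("curve", "gmf", "gmf_scenario" or "ses"):

# Hypothetical call: fake hazard curves plus a risk job built on top of them.
job, inputs = get_fake_risk_job('job_risk.ini', 'job_hazard.ini',
                                output_type="curve")
print job.id, sorted(inputs)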
Example #42
def get_fake_risk_job(risk_cfg, hazard_cfg, output_type="curve",
                      username=None):
    """
    Takes as input the paths to a risk job config file and a hazard job config
    file.

    Creates fake hazard outputs suitable to be used by a risk
    calculation and then creates a :class:`openquake.engine.db.models.OqJob`
    object for a risk calculation. It also returns the input files
    referenced by the risk config file.

    :param output_type: gmf, gmf_scenario, or curve
    """
    username = username if username is not None else default_user().user_name

    hazard_job = get_hazard_job(hazard_cfg, username)
    hc = hazard_job.hazard_calculation

    rlz = models.LtRealization.objects.create(
        hazard_calculation=hazard_job.hazard_calculation,
        ordinal=1, seed=1, weight=None,
        sm_lt_path="test_sm", gsim_lt_path="test_gsim",
        is_complete=False, total_items=1, completed_items=1)
    if output_type == "curve":
        models.HazardCurve.objects.create(
            lt_realization=rlz,
            output=models.Output.objects.create_output(
                hazard_job, "Test Hazard output", "hazard_curve_multi"),
            investigation_time=hc.investigation_time)

        hazard_output = models.HazardCurve.objects.create(
            lt_realization=rlz,
            output=models.Output.objects.create_output(
                hazard_job, "Test Hazard output", "hazard_curve"),
            investigation_time=hc.investigation_time,
            imt="PGA", imls=[0.1, 0.2, 0.3])

        for point in ["POINT(-1.01 1.01)", "POINT(0.9 1.01)",
                      "POINT(0.01 0.01)", "POINT(0.9 0.9)"]:
            models.HazardCurveData.objects.create(
                hazard_curve=hazard_output,
                poes=[0.1, 0.2, 0.3],
                location="%s" % point)

    elif output_type == "gmf_scenario":
        hazard_output = models.Gmf.objects.create(
            output=models.Output.objects.create_output(
                hazard_job, "Test gmf scenario output", "gmf_scenario"))

        site_ids = hazard_job.hazard_calculation.save_sites(
            [(15.48, 38.0900001), (15.565, 38.17), (15.481, 38.25)])
        for site_id in site_ids:
            models.GmfData.objects.create(
                gmf=hazard_output,
                imt="PGA",
                site_id=site_id,
                gmvs=[0.1, 0.2, 0.3])

    else:
        hazard_output = create_gmf_data_records(
            hazard_job, rlz)[0].gmf

    hazard_job.status = "complete"
    hazard_job.save()
    job = engine.prepare_job(username)
    params, files = engine.parse_config(open(risk_cfg, 'r'))

    params.update(dict(hazard_output_id=hazard_output.output.id))

    risk_calc = engine.create_risk_calculation(job.owner, params, files)
    job.risk_calculation = risk_calc
    job.save()
    error_message = validate(job, 'risk', params, files, [])

    # reload risk calculation to have all the types converted properly
    job.risk_calculation = models.RiskCalculation.objects.get(id=risk_calc.id)
    if error_message:
        raise RuntimeError(error_message)
    return job, files