Code Example #1
    def test_pk_set_with_non_existent_predef_key(self):
        """`KeyError` is raised for keys that do not exist in `STATS_KEYS`."""
        job_id = 73
        pkey = "To be or not to be!?"
        stats.delete_job_counters(job_id)

        self.assertRaises(KeyError, stats.pk_set, job_id, pkey, 737)
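Note: this page only shows call sites; the implementation of `delete_job_counters` itself is not included. A minimal sketch of how it could work over a Redis-style KVS, assuming a hypothetical `kvs.get_client()` accessor and the colon-delimited `oqs:<job_id>:*` key pattern used in some tests below (other examples on this page show a slash-delimited layout from a different engine version):

    def delete_job_counters(job_id):
        # Drop every statistics counter stored for the given job.
        # Assumes counters live under keys prefixed with "oqs:<job_id>:".
        conn = kvs.get_client()  # hypothetical KVS/Redis client accessor
        keys = conn.keys("oqs:%s:*" % job_id)
        if keys:
            # Guarding against the empty list also makes this a no-op for
            # jobs that have no counters at all.
            conn.delete(*keys)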
Code Example #2
    def test_pk_inc_with_non_existent_predef_key(self):
        """`KeyError` is raised for keys that do not exist in `STATS_KEYS`."""
        job_id = 83
        pkey = "That is the question.."
        stats.delete_job_counters(job_id)

        self.assertRaises(KeyError, stats.pk_inc, job_id, pkey)
Code Example #3
File: supervisor_test.py Project: matley/oq-engine
    def test_actions_after_job_process_failures(self):
        # the job process is running but has some failure counters above zero
        # shorten the delay before checking failure counters
        supervisor.SupervisorLogMessageConsumer.FCC_DELAY = 2
        self.is_pid_running.return_value = True
        self.get_job_status.return_value = 'running'

        stats.delete_job_counters(self.job.id)
        stats.incr_counter(self.job.id, "h", "a-failures")
        stats.incr_counter(self.job.id, "r", "b-failures")
        stats.incr_counter(self.job.id, "r", "b-failures")
        supervisor.supervise(1, self.job.id, timeout=0.1)

        # the job process is terminated
        self.assertEqual(1, self.terminate_job.call_count)
        self.assertEqual(((1,), {}), self.terminate_job.call_args)

        # stop time is recorded
        self.assertEqual(1, self.record_job_stop_time.call_count)
        self.assertEqual(
            ((self.job.id,), {}),
            self.record_job_stop_time.call_args)

        # the cleanup is triggered
        self.assertEqual(1, self.cleanup_after_job.call_count)
        self.assertEqual(
            ((self.job.id,), {}),
            self.cleanup_after_job.call_args)
Code Example #4
File: __init__.py Project: dsg101/openquake
def prepare_job(params):
    """
    Create a new OqJob and fill in the related OqParams entry.

    Returns the newly created job object.
    """
    # TODO specify the owner as a command line parameter
    owner = OqUser.objects.get(user_name='openquake')

    input_set = InputSet(upload=None, owner=owner)
    input_set.save()

    job_type = CALCULATION_MODE[params['CALCULATION_MODE']]
    job = OqJob(owner=owner, path=None, job_type=job_type)

    oqp = OqParams(input_set=input_set)
    oqp.job_type = job_type

    _insert_input_files(params, input_set)
    _store_input_parameters(params, job_type, oqp)

    oqp.save()

    job.oq_params = oqp
    job.save()

    # Reset all progress indication counters for the job at hand.
    stats.delete_job_counters(job.id)

    return job
Code Example #5
 def test_pk_inc_with_non_existent_debug_key(self):
     """`KeyError` is raised for debug keys that are not in `STATS_KEYS`."""
     job_id = 86
     pkey = "How hard can it be!?"
     stats.delete_job_counters(job_id)
     with helpers.patch("openquake.utils.stats.debug_stats_enabled") as dse:
         dse.return_value = False
         self.assertRaises(KeyError, stats.pk_inc, job_id, pkey)
Code Example #6
 def test_serializer_aborts_on_failure(self):
     # The task function used here raises an exception; the serializer
     # should abort on that failure.
     stats.delete_job_counters(self.job_ctxt.job_id)
     try:
         self.calculator.do_curves(self.sites, 2, self.calculator.ath,
                                   fake_compute_hazard_curve)
     except RuntimeError, err:
         self.assertTrue("h/nhzrd:failed/i" in err.args[0])
Code Example #7
    def test_pk_inc_with_existing_incremental(self):
        """The value is incremented for an existing predefined key."""
        job_id = 82
        pkey = "cblock"
        key = stats.key_name(job_id, *stats.STATS_KEYS[pkey])

        stats.delete_job_counters(job_id)
        kvs = self.connect()
        stats.pk_inc(job_id, pkey)
        self.assertEqual("1", kvs.get(key))
Code Example #8
    def test_pk_set_with_existing_total(self):
        """The value is set correctly for an existing predefined key."""
        job_id = 71
        pkey = "blocks"
        key = stats.key_name(job_id, *stats.STATS_KEYS[pkey])

        stats.delete_job_counters(job_id)
        kvs = self.connect()
        stats.pk_set(job_id, pkey, 717)
        self.assertEqual("717", kvs.get(key))
Code Example #9
 def test_delete_job_counters_deletes_counters_for_job(self):
     """
     The progress indication counters for a given job are deleted.
     """
     kvs = self.connect()
     args = [(55, "h", "a/b/c"), (55, "h", "d/e/f")]
     for data in args:
         stats.incr_counter(*data)
     stats.delete_job_counters(55)
     self.assertEqual(0, len(kvs.keys("oqs:55:*")))
Code Example #10
    def test_pk_get_with_existing_incremental(self):
        """The correct value is obtained for an existing predefined key."""
        job_id = 92
        pkey = "cblock"
        key = stats.key_name(job_id, *stats.STATS_KEYS[pkey])

        stats.delete_job_counters(job_id)
        kvs = self.connect()
        kvs.set(key, 929)
        stats.pk_get(job_id, pkey)
        self.assertEqual("929", kvs.get(key))
Code Example #11
File: utils_stats_test.py Project: pslh/oq-engine
 def test_pk_get_with_existing_debug_and_debug_stats_enabled(self):
     """The value is obtained correctly for an existing debug counter."""
     job_id = 94
     pkey = "hcls_xmlcurvewrites"
     stats.delete_job_counters(job_id)
     with helpers.patch("openquake.utils.stats.debug_stats_enabled") as dse:
         dse.return_value = True
         key = stats.key_name(job_id, *stats.STATS_KEYS[pkey])
         kvs = self.connect()
         kvs.set(key, 949)
         self.assertEqual(949, stats.pk_get(job_id, pkey))
Code Example #12
File: utils_stats_test.py Project: pslh/oq-engine
 def test_pk_get_with_existing_debug_and_debug_stats_off(self):
     """`None` is returned when debug stats are off."""
     job_id = 95
     pkey = "hcls_xmlcurvewrites"
     stats.delete_job_counters(job_id)
     with helpers.patch("openquake.utils.stats.debug_stats_enabled") as dse:
         dse.return_value = False
         key = stats._KEY_TEMPLATE % ((job_id, ) + stats.STATS_KEYS[pkey])
         kvs = self.connect()
         kvs.set(key, 959)
         self.assertIs(None, stats.pk_get(job_id, pkey))
Code Example #13
File: utils_stats_test.py Project: pslh/oq-engine
 def test_pk_inc_with_existing_debug_and_debug_stats_off(self):
     """
     The debug counter value is not incremented when debug stats are off.
     """
     job_id = 85
     pkey = "hcls_xmlcurvewrites"
     stats.delete_job_counters(job_id)
     with helpers.patch("openquake.utils.stats.debug_stats_enabled") as dse:
         dse.return_value = False
         stats.pk_inc(job_id, pkey)
         kvs = self.connect()
         key = stats._KEY_TEMPLATE % ((job_id, ) + stats.STATS_KEYS[pkey])
         self.assertIs(None, kvs.get(key))
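Between this test and the `KeyError` tests earlier on the page, the contract of `pk_inc` is fairly well pinned down: membership in `STATS_KEYS` is checked first, so unknown keys raise `KeyError` even when debug stats are off, while known debug counters become silent no-ops whenever `debug_stats_enabled()` returns `False`. A sketch of that control flow, reusing the hypothetical helpers from the reconstruction above (not the engine's confirmed code):

    def pk_inc(job_id, pkey):
        # Increment a predefined counter, skipping debug counters when
        # debug stats are disabled.
        if pkey not in STATS_KEYS:
            raise KeyError(pkey)              # unknown predefined key
        area, counter, ctype = STATS_KEYS[pkey]
        if ctype == "d" and not debug_stats_enabled():
            return                            # debug stats off: no-op
        conn = kvs.get_client()
        conn.incr(key_name(job_id, area, counter, ctype))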
Code Example #14
 def test_get_value_with_debug_stats_enabled_but_no_value(self):
     """
     `None` is returned for a debug counter if debug stats are enabled
     but the counter has no value.
     """
     args = (61, "h", "d/g/z", "d")
     stats.delete_job_counters(args[0])
     with helpers.patch("openquake.utils.stats.debug_stats_enabled") as dse:
         dse.return_value = True
         key = stats.key_name(*args)
         kvs = self.connect()
         self.assertIs(None, kvs.get(key))
         self.assertIs(None, stats.get_counter(*args))
Code Example #15
 def test_delete_job_counters_resets_counters(self):
     """
     The progress indication counters for a given job are reset.
     """
     kvs = self.connect()
     args = [(66, "h", "g/h/i", "i"), (66, "h", "j/k/l", "i")]
     for data in args:
         stats.incr_counter(*data[:-1])
     stats.delete_job_counters(66)
     # The counters have been reset, after incrementing we expect them all
     # to have a value of "1".
     for data in args:
         stats.incr_counter(*data[:-1])
         self.assertEqual("1", kvs.get(stats.key_name(*data)))
Code Example #16
File: core.py Project: wmorales/oq-engine
    def post_execute(self):
        """Clean up stats counters and create XML output artifacts (if
        requested).
        """
        # TODO: export these counters to the database before deleting them
        # See bug https://bugs.launchpad.net/openquake/+bug/925946.
        stats.delete_job_counters(self.job_ctxt.job_id)

        if "xml" in self.job_ctxt.serialize_results_to:
            [uhs_output] = Output.objects.filter(oq_job=self.job_ctxt.oq_job.id, output_type="uh_spectra")

            target_dir = os.path.join(self.job_ctxt.params.get("BASE_PATH"), self.job_ctxt.params.get("OUTPUT_DIR"))

            export_uhs(uhs_output, target_dir)
Code Example #17
File: utils_stats_test.py Project: matley/oq-engine
    def test_failure_counters_with_no_area(self):
        # Failure counters are returned for all computation areas if the
        # 'area' parameter is omitted.
        stats.delete_job_counters(123)
        fcname = itertools.cycle(string.ascii_lowercase)
        for cidx, carea in enumerate(["g", "h", "r"]):
            stats.incr_counter(123, carea, "%s-failures" % fcname.next())
            if not (cidx % 2):
                stats.incr_counter(123, carea, "%s-failures" % fcname.next())

        self.assertEqual(
            [('oqs/123/g/a-failures/i', 1), ('oqs/123/g/b-failures/i', 1),
             ('oqs/123/h/c-failures/i', 1), ('oqs/123/r/d-failures/i', 1),
             ('oqs/123/r/e-failures/i', 1)],
            sorted(stats.failure_counters(123)))
Code Example #18
    def test_failure_counters_with_no_area(self):
        # Failure counters are returned for all computation areas if the
        # 'area' parameter is omitted.
        stats.delete_job_counters(123)
        fcname = itertools.cycle(string.ascii_lowercase)
        for cidx, carea in enumerate(["g", "h", "r"]):
            stats.incr_counter(123, carea, "%s:failed" % fcname.next())
            if not (cidx % 2):
                stats.incr_counter(123, carea, "%s:failed" % fcname.next())

        self.assertEqual([('oqs/123/g/a:failed/i', 1),
                          ('oqs/123/g/b:failed/i', 1),
                          ('oqs/123/h/c:failed/i', 1),
                          ('oqs/123/r/d:failed/i', 1),
                          ('oqs/123/r/e:failed/i', 1)],
                         sorted(stats.failure_counters(123)))
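The two variants of this test also record a naming change between engine versions: failure counters moved from `<name>-failures` to `<name>:failed`. Either way, `failure_counters` can be realized as a glob scan over the job's counter keys; a sketch against the newer `:failed` convention (the scanning approach is an assumption, not taken from the engine):

    def failure_counters(job_id, area=None):
        # Return (key, value) pairs for a job's failure counters, optionally
        # restricted to a single computation area ("g", "h" or "r").
        # Returns an empty list when the job has no failure counters.
        conn = kvs.get_client()
        pattern = "oqs/%s/%s/*:failed/i" % (job_id, area or "*")
        return [(key, int(conn.get(key))) for key in conn.keys(pattern)]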
Code Example #19
File: utils_stats_test.py Project: pslh/oq-engine
    def test_failure_counters_with_valid_area(self):
        # Failure counters are returned for valid computation areas.
        stats.delete_job_counters(123)
        fcname = itertools.cycle(string.ascii_lowercase)
        for cidx, carea in enumerate(["g", "h", "r"]):
            stats.incr_counter(123, carea, "%s-failures" % fcname.next())
            if not (cidx % 2):
                stats.incr_counter(123, carea, "%s-failures" % fcname.next())

        self.assertEqual([('oqs/123/g/a-failures/i', 1),
                          ('oqs/123/g/b-failures/i', 1)],
                         sorted(stats.failure_counters(123, "g")))
        self.assertEqual([('oqs/123/h/c-failures/i', 1)],
                         sorted(stats.failure_counters(123, "h")))
        self.assertEqual([('oqs/123/r/d-failures/i', 1),
                          ('oqs/123/r/e-failures/i', 1)],
                         sorted(stats.failure_counters(123, "r")))
Code Example #20
File: core.py Project: bwyss/oq-engine
    def post_execute(self):
        """Clean up stats counters and create XML output artifacts (if
        requested).
        """
        # TODO: export these counters to the database before deleting them
        # See bug https://bugs.launchpad.net/openquake/+bug/925946.
        stats.delete_job_counters(self.job_ctxt.job_id)

        if 'xml' in self.job_ctxt.serialize_results_to:
            [uhs_output] = Output.objects.filter(
                oq_job=self.job_ctxt.oq_job.id,
                output_type='uh_spectra')

            target_dir = os.path.join(self.job_ctxt.params.get('BASE_PATH'),
                                      self.job_ctxt.params.get('OUTPUT_DIR'))

            export_uhs(uhs_output, target_dir)
Code Example #21
    def test_failure_counters_with_valid_area(self):
        # Failure counters are returned for valid computation areas.
        stats.delete_job_counters(123)
        fcname = itertools.cycle(string.ascii_lowercase)
        for cidx, carea in enumerate(["g", "h", "r"]):
            stats.incr_counter(123, carea, "%s:failed" % fcname.next())
            if not (cidx % 2):
                stats.incr_counter(123, carea, "%s:failed" % fcname.next())

        self.assertEqual(
            [('oqs/123/g/a:failed/i', 1), ('oqs/123/g/b:failed/i', 1)],
            sorted(stats.failure_counters(123, "g")))
        self.assertEqual([('oqs/123/h/c:failed/i', 1)],
                         sorted(stats.failure_counters(123, "h")))
        self.assertEqual(
            [('oqs/123/r/d:failed/i', 1), ('oqs/123/r/e:failed/i', 1)],
            sorted(stats.failure_counters(123, "r")))
Code Example #22
File: supervisor_test.py Project: bwyss/oq-engine
    def test_actions_after_job_process_failures(self):
        # the job process is running but has some failure counters above zero
        # shorten the delay before checking failure counters
        supervisor.SupervisorLogMessageConsumer.FCC_DELAY = 2
        self.is_pid_running.return_value = True
        self.get_job_status.return_value = 'running'

        stats.delete_job_counters(123)
        stats.incr_counter(123, "h", "a:failed")
        stats.incr_counter(123, "r", "b:failed")
        stats.incr_counter(123, "r", "b:failed")
        supervisor.supervise(1, 123, timeout=0.1)

        # the job process is terminated
        self.assertEqual(1, self.terminate_job.call_count)
        self.assertEqual(((1,), {}), self.terminate_job.call_args)

        # stop time is recorded
        self.assertEqual(1, self.record_job_stop_time.call_count)
        self.assertEqual(((123,), {}), self.record_job_stop_time.call_args)

        # the cleanup is triggered
        self.assertEqual(1, self.cleanup_after_job.call_count)
        self.assertEqual(((123,), {}), self.cleanup_after_job.call_args)
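The two supervisor tests assert on three mocked helpers (`terminate_job`, `record_job_stop_time`, `cleanup_after_job`), which suggests a check of roughly this shape inside the supervisor's periodic failure-counter poll; the structure below is an assumption, only the names come from the tests:

    def check_failure_counters(pid, job_id):
        # Kill the job and clean up as soon as any failure counter is set.
        if stats.failure_counters(job_id):
            terminate_job(pid)            # mocked in the tests above
            record_job_stop_time(job_id)  # mocked in the tests above
            cleanup_after_job(job_id)     # mocked in the tests above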
Code Example #23
File: engine.py Project: leoalvar/oq-engine
def run_job(job, params, sections, output_type='db', log_level='warn',
            force_inputs=False):
    """Given an :class:`openquake.db.models.OqJobProfile` object, create a new
    :class:`openquake.db.models.OqJob` object and run the job.

    NOTE: The params and sections parameters are temporary and will only be
    required until we can run calculations purely using Django model objects as
    calculator input.

    Returns the calculation object when the calculation concludes.

    :param job:
        :class:`openquake.db.models.OqJob` instance
    :param params:
        A dictionary of config parameters parsed from the calculation
        config file.
    :param sections:
        A list of sections parsed from the calculation config file.
    :param output_type:
        'db' or 'xml' (defaults to 'db')
    :param str log_level:
        One of 'debug', 'info', 'warn', 'error', or 'critical'.
        Defaults to 'warn'.
    :param bool force_inputs: If `True` the model input files will be parsed
        and the resulting content written to the database no matter what.

    :returns:
        :class:`openquake.db.models.OqJob` instance.
    """
    if not output_type in ('db', 'xml'):
        raise RuntimeError("output_type must be 'db' or 'xml'")

    job.description = job.profile().description
    job.status = 'running'
    job.save()

    # Clear any counters for this job_id, prior to running the
    # job.
    # We do this just to make sure all of the counters behave properly and can
    # provide accurate data about a calculation in-progress.
    stats.delete_job_counters(job.id)

    # Make the job/calculation ID generally available.
    utils_config.Config().job_id = job.id

    serialize_results_to = ['db']
    if output_type == 'xml':
        serialize_results_to.append('xml')

    job_ctxt = JobContext(params, job.id, sections=sections,
                          serialize_results_to=serialize_results_to,
                          oq_job_profile=job.profile(), oq_job=job,
                          log_level=log_level, force_inputs=force_inputs)

    # closing all db connections to make sure they're not shared between
    # supervisor and job executor processes; otherwise, if one of them closes
    # the connection, it immediately becomes unavailable to the other
    close_connection()

    job_pid = os.fork()
    if not job_pid:
        # calculation executor process
        try:
            logs.init_logs_amqp_send(level=log_level, job_id=job.id)
            _launch_job(job_ctxt, sections)
        except Exception, ex:
            logs.LOG.critical("Calculation failed with exception: '%s'"
                              % str(ex))
            job.status = 'failed'
            job.save()
            raise
        else:
            job.status = 'succeeded'
            job.save()
        return
Code Example #24
File: utils_stats_test.py Project: matley/oq-engine
 def test_failure_counters_with_no_failures(self):
     # An empty list is returned in the absence of any failure counters
     stats.delete_job_counters(123)
     self.assertEqual([], stats.failure_counters(123))
Code Example #25
File: engine.py Project: kpanic/openquake
def run_calculation(job_profile, params, sections, output_type='db'):
    """Given an :class:`openquake.db.models.OqJobProfile` object, create a new
    :class:`openquake.db.models.OqCalculation` object and run the calculation.

    NOTE: The params and sections parameters are temporary and will only be
    required until we can run calculations purely using Django model objects as
    calculator input.

    Returns the calculation object when the calculation concludes.

    :param job_profile:
        :class:`openquake.db.models.OqJobProfile` instance.
    :param params:
        A dictionary of config parameters parsed from the calculation
        config file.
    :param sections:
        A list of sections parsed from the calculation config file.
    :param output_type:
        'db' or 'xml' (defaults to 'db')

    :returns:
        :class:`openquake.db.models.OqCalculation` instance.
    """
    if not output_type in ('db', 'xml'):
        raise RuntimeError("output_type must be 'db' or 'xml'")

    calculation = OqCalculation(owner=job_profile.owner)
    calculation.oq_job_profile = job_profile
    calculation.status = 'running'
    calculation.save()

    # Clear any counters for this calculation_id, prior to running the
    # calculation.
    # We do this just to make sure all of the counters behave properly and can
    # provide accurate data about a calculation in-progress.
    stats.delete_job_counters(calculation.id)

    # Make the job/calculation ID generally available.
    utils_config.Config().job_id = calculation.id

    serialize_results_to = ['db']
    if output_type == 'xml':
        serialize_results_to.append('xml')

    calc_proxy = CalculationProxy(params, calculation.id, sections=sections,
                                  serialize_results_to=serialize_results_to,
                                  oq_job_profile=job_profile,
                                  oq_calculation=calculation)

    # closing all db connections to make sure they're not shared between
    # supervisor and job executor processes; otherwise, if one of them closes
    # the connection, it immediately becomes unavailable to the other
    close_connection()

    calc_pid = os.fork()
    if not calc_pid:
        # calculation executor process
        try:
            logs.init_logs_amqp_send(level=FLAGS.debug, job_id=calculation.id)
            _launch_calculation(calc_proxy, sections)
        except Exception, ex:
            logs.LOG.critical("Calculation failed with exception: '%s'"
                              % str(ex))
            calculation.status = 'failed'
            calculation.save()
            raise
        else:
            calculation.status = 'succeeded'
            calculation.save()
        return
Code Example #26
def run_job(job,
            params,
            sections,
            output_type='db',
            log_level='warn',
            force_inputs=False,
            log_file=None):
    """Given an :class:`openquake.db.models.OqJobProfile` object, create a new
    :class:`openquake.db.models.OqJob` object and run the job.

    NOTE: The params and sections parameters are temporary and will only be
    required until we can run calculations purely using Django model objects as
    calculator input.

    Returns the calculation object when the calculation concludes.

    :param job:
        :class:`openquake.db.models.OqJob` instance
    :param params:
        A dictionary of config parameters parsed from the calculation
        config file.
    :param sections:
        A list of sections parsed from the calculation config file.
    :param output_type:
        'db' or 'xml' (defaults to 'db')
    :param str log_level:
        One of 'debug', 'info', 'warn', 'error', or 'critical'.
        Defaults to 'warn'.
    :param bool force_inputs: If `True` the model input files will be parsed
        and the resulting content written to the database no matter what.
    :param str log_file:
        Optional log file location.

    :returns:
        :class:`openquake.db.models.OqJob` instance.
    """
    if not output_type in ('db', 'xml'):
        raise RuntimeError("output_type must be 'db' or 'xml'")

    job.description = job.profile().description
    job.status = 'running'
    job.save()

    # Clear any counters for this job_id, prior to running the
    # job.
    # We do this just to make sure all of the counters behave properly and can
    # provide accurate data about a calculation in-progress.
    stats.delete_job_counters(job.id)

    # Make the job/calculation ID generally available.
    utils_config.Config().job_id = job.id

    serialize_results_to = ['db']
    if output_type == 'xml':
        serialize_results_to.append('xml')

    job_ctxt = JobContext(params,
                          job.id,
                          sections=sections,
                          serialize_results_to=serialize_results_to,
                          oq_job_profile=job.profile(),
                          oq_job=job,
                          log_level=log_level,
                          force_inputs=force_inputs)

    # closing all db connections to make sure they're not shared between
    # supervisor and job executor processes; otherwise, if one of them closes
    # the connection, it immediately becomes unavailable to the other
    close_connection()

    job_pid = os.fork()
    if not job_pid:
        # calculation executor process
        try:
            logs.init_logs_amqp_send(level=log_level, job_id=job.id)
            _launch_job(job_ctxt, sections)
        except Exception, ex:
            logs.LOG.critical("Calculation failed with exception: '%s'" %
                              str(ex))
            job.status = 'failed'
            job.save()
            raise
        else:
            job.status = 'succeeded'
            job.save()
        return
Code Example #27
 def test_pk_get_with_non_existent_predef_key(self):
     """`KeyError` is raised for keys that do not exist in `STATS_KEYS`."""
     job_id = 93
     pkey = "This is unlikely to exist"
     stats.delete_job_counters(job_id)
     self.assertRaises(KeyError, stats.pk_get, job_id, pkey)
Code Example #28
 def test_delete_job_counters_copes_with_nonexistent_counters(self):
     """
     stats.delete_job_counters() copes with jobs without progress indication
     counters.
     """
     stats.delete_job_counters(sys.maxint)