Example #1
def run_hazard_job(cfg, exports=None):
    """
    Given the path to a job config file, run the job and assert that it was
    successful. If this assertion passes, return the completed job.

    :param str cfg:
        Path to a job config file.
    :param list exports:
        A list of export format types. Currently only 'xml' is supported.
    :returns:
        The completed :class:`~openquake.engine.db.models.OqJob`.
    """
    if exports is None:
        exports = []

    job = get_hazard_job(cfg)
    job.is_running = True
    job.save()

    models.JobStats.objects.create(oq_job=job)

    hc = job.hazard_calculation
    calc = get_calculator_class('hazard', hc.calculation_mode)(job)
    try:
        logs.init_logs_amqp_send(
            level='ERROR', calc_domain='hazard', calc_id=hc.id)
        engine._do_run_calc(job, exports, calc, 'hazard')
    finally:
        job.is_running = False
        job.calc = calc
        job.save()
    return job
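A minimal usage sketch for this helper, assuming the same test-helpers scope as the snippet above; the config path is a placeholder, not taken from the source:

job = run_hazard_job('qa_tests/some_job_hazard.ini', exports=['xml'])
assert job.is_running is False  # reset in the helper's finally block
assert job.calc is not None     # the calculator instance is attached before returning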
Example #2
def run_risk_job(cfg, exports=None, hazard_calculation_id=None,
                 hazard_output_id=None):
    """
    Given the path to a risk job config file and a hazard_calculation_id
    or a hazard_output_id, run the job.
    """
    if exports is None:
        exports = []

    # You can't specify both a hazard output and a hazard calculation;
    # pick at most one.
    assert not (hazard_calculation_id is not None
                and hazard_output_id is not None)

    job = get_risk_job(cfg, hazard_calculation_id=hazard_calculation_id,
                       hazard_output_id=hazard_output_id)
    job.is_running = True
    job.save()

    models.JobStats.objects.create(oq_job=job)

    rc = job.risk_calculation
    calc = get_calculator_class('risk', rc.calculation_mode)(job)
    logs.init_logs_amqp_send(level='ERROR', calc_domain='risk', calc_id=rc.id)
    completed_job = engine._do_run_calc(job, exports, calc, 'risk')
    job.is_running = False
    job.save()

    return completed_job
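A hedged usage sketch, mainly to show that hazard_calculation_id and hazard_output_id are mutually exclusive; the path and ids below are placeholders:

job = run_risk_job('qa_tests/some_job_risk.ini', exports=['xml'],
                   hazard_calculation_id=42)
# ... or, alternatively, drive it from a single hazard output, never both:
# run_risk_job('qa_tests/some_job_risk.ini', hazard_output_id=7)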
Example #3
    def test_init_logs_amqp_send_with_no_amqp_handler(self):
        # init_logs_amqp_send() will add an `AMQPHandler` instance to the
        # root logger if none is present.
        mm = mock.MagicMock(spec=kombu.messaging.Producer)
        with mock.patch.object(logs.AMQPHandler, "_initialize") as minit:
            minit.return_value = mm
            with helpers.patch("logging.root.addHandler") as mah:
                logs.init_logs_amqp_send("info", 'hazard', 321)
                self.assertEqual(1, mah.call_count)
                (single_arg,) = mah.call_args[0]
                self.assertTrue(isinstance(single_arg, logs.AMQPHandler))
        self.assertEqual(logging.root.level, logging.INFO)
Example #4
def run_calc(job, log_level, log_file, exports, job_type):
    """
    Run a calculation.

    :param job:
        :class:`openquake.engine.db.models.OqJob` instance which references a
        valid :class:`openquake.engine.db.models.RiskCalculation` or
        :class:`openquake.engine.db.models.HazardCalculation`.
    :param str log_level:
        The desired logging level. Valid choices are 'debug', 'info',
        'progress', 'warn', 'error', and 'critical'.
    :param str log_file:
        Complete path (including file name) to file where logs will be written.
        If `None`, logging will just be printed to standard output.
    :param list exports:
        A (potentially empty) list of export targets. Currently only "xml" is
        supported.
    :param str job_type:
        'hazard' or 'risk'
    """
    calc_mode = getattr(job, '%s_calculation' % job_type).calculation_mode
    calc = get_calculator_class(job_type, calc_mode)(job)

    # Create job stats, which implicitly records the start time for the job
    models.JobStats.objects.create(oq_job=job)

    # Closing all db connections to make sure they're not shared between
    # supervisor and job executor processes.
    # Otherwise, if one of them closes the connection it immediately becomes
    # unavailable for others.
    close_connection()

    job_pid = os.fork()

    if not job_pid:
        # calculation executor process
        try:
            logs.init_logs_amqp_send(level=log_level, calc_domain=job_type,
                                     calc_id=job.calculation.id)
            # run the job
            job.is_running = True
            job.save()
            kvs.mark_job_as_current(job.id)
            _do_run_calc(job, exports, calc, job_type)
        except Exception as ex:
            logs.LOG.critical("Calculation failed with exception: '%s'"
                              % str(ex))
            raise
        finally:
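The snippet above is truncated after the finally clause, but the overall shape is the standard os.fork() idiom: close shared resources, fork, run the work in the child, and let the parent supervise. A generic sketch of that idiom, independent of the engine internals (run_in_child and work are illustrative names):

import os

def run_in_child(work):
    # shared resources (e.g. DB connections) should be closed before forking,
    # as the close_connection() call above explains
    pid = os.fork()
    if not pid:
        # child process: do the work, then exit without returning to the caller
        try:
            work()
        finally:
            os._exit(0)
    # parent process: wait for the child and return its raw exit status
    _, status = os.waitpid(pid, 0)
    return status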
Example #5
    def wrapped(*args, **kwargs):
        """
        Initialize logs, make sure the job is still running, and run the task
        code surrounded by a try-except. If any error occurs, log it as a
        critical failure.
        """
        # job_id is the only required argument: it is either the first
        # positional argument passed to the task or the 'job_id' keyword
        job_id = kwargs.get('job_id') or args[0]

        with EnginePerformanceMonitor(
                'totals per task', job_id, tsk, flush=True):
            job = models.OqJob.objects.get(id=job_id)

            # it is important to save the task ids soon, so that
            # the revoke functionality implemented in supervisor.py can work
            EnginePerformanceMonitor.store_task_id(job_id, tsk)

            with EnginePerformanceMonitor(
                    'loading calculation object', job_id, tsk, flush=True):
                calculation = job.calculation

            # Set up logging via amqp.
            if isinstance(calculation, models.HazardCalculation):
                logs.init_logs_amqp_send(level=job.log_level,
                                         calc_domain='hazard',
                                         calc_id=calculation.id)
            else:
                logs.init_logs_amqp_send(level=job.log_level,
                                         calc_domain='risk',
                                         calc_id=calculation.id)

            try:
                # Tasks can be used in the `execute` or `post-process` phase
                if job.is_running is False:
                    raise JobCompletedError('Job %d was killed' % job_id)
                elif job.status not in ('executing', 'post_processing'):
                    raise JobCompletedError(
                        'The status of job %d is %s, should be executing or '
                        'post_processing' % (job_id, job.status))
                # else continue with task execution
                res = task_func(*args, **kwargs)
            # TODO: should we do something different with JobCompletedError?
            except Exception as err:
                logs.LOG.critical('Error occurred in task: %s', err)
                logs.LOG.exception(err)
                raise
            else:
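wrapped closes over task_func and tsk, so it is the inner function of a task decorator. A skeletal, hypothetical reconstruction of that outer decorator (the name openquake_task and the simplified body are assumptions, shown only to make the closure explicit):

import functools

def openquake_task(task_func):
    # in the engine, tsk would be the celery task object; this sketch simply
    # reuses the wrapped function so it stays self-contained
    tsk = task_func
    @functools.wraps(task_func)
    def wrapped(*args, **kwargs):
        job_id = kwargs.get('job_id') or args[0]
        # ... logging setup, status checks and monitoring as in the snippet ...
        return task_func(*args, **kwargs)
    return wrapped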
Example #6
    def test_init_logs_amqp_send_with_existing_amqp_handler(self):
        # init_logs_amqp_send() will not add more than one `AMQPHandler`
        # instance to the root logger.
        mm = mock.MagicMock(spec=kombu.messaging.Producer)
        with mock.patch.object(logs.AMQPHandler, "_initialize") as minit:
            minit.return_value = mm
            handler = logs.AMQPHandler()
            handler.set_calc_info = mock.Mock()
            logging.root.handlers.append(handler)
            with helpers.patch("logging.root.addHandler") as mah:
                logs.init_logs_amqp_send("info", 'risk', 322)
                self.assertEqual(0, mah.call_count)
                self.assertEqual(1, handler.set_calc_info.call_count)
                self.assertEqual(('risk', 322,),
                                 handler.set_calc_info.call_args[0])
Example #7
def _job_exec(job, log_level, exports, job_type, calc):
    """
    Abstraction of some general job execution procedures.

    Parameters are the same as :func:`run_calc`, except for ``supervised``
    which is not included. Also ``calc`` is an instance of the calculator class
    which is passed to :func:`_do_run_calc`.
    """
    logs.init_logs_amqp_send(level=log_level, calc_domain=job_type,
                             calc_id=job.calculation.id)
    # run the job
    job.is_running = True
    job.save()
    kvs.mark_job_as_current(job.id)
    _do_run_calc(job, exports, calc, job_type)
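A hedged call sketch, assuming a job and a calc already built the way run_calc above builds them; the values are placeholders:

# job_type selects the calc_domain ('hazard' or 'risk'); exports may be empty
_job_exec(job, 'info', ['xml'], 'hazard', calc)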
Example #8
    def wrapped(*args, **kwargs):
        """
        Initialize logs, make sure the job is still running, and run the task
        code surrounded by a try-except. If any error occurs, log it as a
        critical failure.
        """
        # job_id is the only required argument: it is either the first
        # positional argument passed to the task or the 'job_id' keyword
        job_id = kwargs.get('job_id') or args[0]
        job = models.OqJob.objects.get(id=job_id)
        if job.is_running is False:
            # the job was killed, it is useless to run the task
            return

        # it is important to save the task ids soon, so that
        # the revoke functionality implemented in supervisor.py can work
        EnginePerformanceMonitor.store_task_id(job_id, tsk)

        with EnginePerformanceMonitor('total ' + task_func.__name__,
                                      job_id,
                                      tsk,
                                      flush=True):

            with EnginePerformanceMonitor('loading calculation object',
                                          job_id,
                                          tsk,
                                          flush=True):
                calculation = job.calculation

            # Set up logging via amqp.
            if isinstance(calculation, models.HazardCalculation):
                logs.init_logs_amqp_send(level=job.log_level,
                                         calc_domain='hazard',
                                         calc_id=calculation.id)
            else:
                logs.init_logs_amqp_send(level=job.log_level,
                                         calc_domain='risk',
                                         calc_id=calculation.id)
            try:
                res = task_func(*args, **kwargs)
            except Exception as err:
                logs.LOG.critical('Error occurred in task: %s', err)
                logs.LOG.exception(err)
                raise
            else:
Example #9
    def test_init_logs_amqp_send_changes_logging_level(self):
        # init_logs_amqp_send() changes the root logger's level regardless
        # of any handlers already attached.
        mm = mock.MagicMock(spec=kombu.messaging.Producer)
        with mock.patch.object(logs.AMQPHandler, "_initialize") as minit:
            minit.return_value = mm
            handler = logs.AMQPHandler()
            logging.root.handlers.append(handler)
            handler.set_calc_info = mock.Mock()

            logging.root.setLevel(logging.INFO)

            logs.init_logs_amqp_send("warning", 'hazard', 322)
            self.assertEqual(logging.root.level, logging.WARNING)

            logs.init_logs_amqp_send("debug", 'risk', 323)
            self.assertEqual(logging.root.level, logging.DEBUG)

            logs.init_logs_amqp_send("error", 'hazard', 324)
            self.assertEqual(logging.root.level, logging.ERROR)
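Taken together, the init_logs_amqp_send tests above (Examples #3, #6 and #9) pin down the function's observable contract. A behavioral sketch reconstructed only from those assertions (this is not the engine's implementation, and the import path for logs is assumed):

import logging

from openquake.engine import logs  # assumed import path, as used in the tests


def init_logs_amqp_send_sketch(level, calc_domain, calc_id):
    # the root logger level always follows the string argument
    # (engine-specific levels such as 'progress' are not handled here)
    logging.root.setLevel(getattr(logging, level.upper()))
    # reuse an existing AMQPHandler if one is already attached ...
    for handler in logging.root.handlers:
        if isinstance(handler, logs.AMQPHandler):
            handler.set_calc_info(calc_domain, calc_id)
            return
    # ... otherwise attach a fresh one; the tests do not constrain whether
    # set_calc_info is also called on the newly created handler
    logging.root.addHandler(logs.AMQPHandler())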