def run_calc(job, log_level, log_file, exports, job_type): """ Run a calculation. :param job: :class:`openquake.engine.db.model.OqJob` instance which references a valid :class:`openquake.engine.db.models.RiskCalculation` or :class:`openquake.engine.db.models.HazardCalculation`. :param str log_level: The desired logging level. Valid choices are 'debug', 'info', 'progress', 'warn', 'error', and 'critical'. :param str log_file: Complete path (including file name) to file where logs will be written. If `None`, logging will just be printed to standard output. :param list exports: A (potentially empty) list of export targets. Currently only "xml" is supported. :param calc: Calculator object, which must implement the interface of :class:`openquake.engine.calculators.base.Calculator`. :param str job_type: 'hazard' or 'risk' """ calc_mode = getattr(job, '%s_calculation' % job_type).calculation_mode calc = get_calculator_class(job_type, calc_mode)(job) # Create job stats, which implicitly records the start time for the job models.JobStats.objects.create(oq_job=job) # Closing all db connections to make sure they're not shared between # supervisor and job executor processes. # Otherwise, if one of them closes the connection it immediately becomes # unavailable for others. close_connection() job_pid = os.fork() if not job_pid: # calculation executor process try: logs.init_logs_amqp_send(level=log_level, calc_domain=job_type, calc_id=job.calculation.id) # run the job job.is_running = True job.save() kvs.mark_job_as_current(job.id) _do_run_calc(job, exports, calc, job_type) except Exception, ex: logs.LOG.critical("Calculation failed with exception: '%s'" % str(ex)) raise finally:
def test_get_current_job_ids(self):
    """
    Given the test data, make sure that
    :py:function:`bin.cache_gc._get_current_job_ids` returns the correct
    IDs.
    """
    # Mark three jobs as current; this adds their job keys to the
    # CURRENT_JOBS set.
    expected_ids = [1, 2, 3]
    for current_id in expected_ids:
        kvs.mark_job_as_current(current_id)

    # The garbage-collector helper should report exactly those IDs.
    self.assertEqual(expected_ids, cache_gc._get_current_job_ids())
def _job_exec(job, log_level, exports, job_type, calc):
    """
    Abstraction of some general job execution procedures. Parameters are the
    same as :func:`run_calc`, except for ``supervised`` which is not included.
    Also ``calc`` is an instance of the calculator class which is passed to
    :func:`_do_run_calc`.
    """
    # Route this process' log records over AMQP, tagged with the
    # calculation domain ('hazard' or 'risk') and calculation id.
    logs.init_logs_amqp_send(level=log_level, calc_domain=job_type,
                             calc_id=job.calculation.id)

    # Flag the job record as running and persist that state before any
    # actual work starts.
    job.is_running = True
    job.save()

    # Register the job with the KVS so its keys survive garbage
    # collection while the calculation runs, then kick it off.
    kvs.mark_job_as_current(job.id)
    _do_run_calc(job, exports, calc, job_type)
def test_mark_job_as_current(self):
    """
    Test the generation of job keys using
    :py:function:`openquake.engine.kvs.mark_job_as_current`.
    """
    marked_ids = (1, 2)
    for marked_id in marked_ids:
        kvs.mark_job_as_current(marked_id)

    # Each marked id must now be a member of the CURRENT_JOBS set.
    for marked_id in marked_ids:
        self.assertTrue(self.client.sismember(kvs.tokens.CURRENT_JOBS,
                                              marked_id))
def test_mark_job_as_current(self):
    """
    Test the generation of job keys using
    :py:function:`openquake.engine.kvs.mark_job_as_current`.
    """
    first_id = 1
    second_id = 2
    kvs.mark_job_as_current(first_id)
    kvs.mark_job_as_current(second_id)

    # Verify membership of both ids in the CURRENT_JOBS set.
    def in_current_jobs(job_id):
        # Helper: is `job_id` recorded in the CURRENT_JOBS set?
        return self.client.sismember(kvs.tokens.CURRENT_JOBS, job_id)

    self.assertTrue(in_current_jobs(first_id))
    self.assertTrue(in_current_jobs(second_id))
def setUp(self):
    self.client = kvs.get_client()
    self.client.flushall()

    # A job that will own some fake KVS data.
    self.test_job = 1
    kvs.mark_job_as_current(self.test_job)

    # Keys under which the fake payloads for test_job are stored.
    self.gmf1_key = kvs.tokens.gmf_set_key(self.test_job, 0, 0)
    self.gmf2_key = kvs.tokens.gmf_set_key(self.test_job, 0, 1)
    self.vuln_key = kvs.tokens.vuln_key(self.test_job)

    # Populate the fake data for test_job (ordered to preserve the
    # original sequence of `set` calls).
    fake_payloads = [
        (self.gmf1_key, 'fake gmf data 1'),
        (self.gmf2_key, 'fake gmf data 2'),
        (self.vuln_key, 'fake vuln curve data'),
    ]
    for key, payload in fake_payloads:
        self.client.set(key, payload)

    # A second current job which has no data attached.
    self.dataless_job = 2
    kvs.mark_job_as_current(self.dataless_job)
def __init__(self, params, job_id, sections=None, base_path=None,
             serialize_results_to=None, oq_job_profile=None,
             oq_job=None, log_level='warn', force_inputs=False):
    """
    :param dict params: Dict of job config params.
    :param int job_id:
        ID of the corresponding oq_job db record.
    :param list sections: List of config file sections. Example::

        ['HAZARD', 'RISK']

        Defaults to `None`, meaning "no sections".
    :param str base_path: base directory containing job input files
    :param list serialize_results_to:
        Targets to which results are serialized. Defaults to `None`,
        meaning "no targets".
    :param oq_job_profile:
        :class:`openquake.engine.db.models.OqJobProfile` instance; database
        representation of the job profile / calculation configuration.
    :param oq_job:
        :class:`openquake.engine.db.models.OqJob` instance; database
        representation of the runtime thing we refer to as the
        'calculation'.
    :param str log_level:
        One of 'debug', 'info', 'warn', 'error', 'critical'.
        Defaults to 'warn'.
    :param bool force_inputs: If `True` the model input files will be
        parsed and the resulting content written to the database no matter
        what.
    """
    self._job_id = job_id
    mark_job_as_current(job_id)  # enables KVS gc

    self.sites = []
    self.blocks_keys = []
    self.params = params
    # `None` sentinels replace the original mutable default arguments
    # (`sections=list()`, `serialize_results_to=list()`), which are
    # evaluated once at def-time and shared across calls. Behavior for
    # all callers is unchanged: both attributes default to empty lists.
    self.sections = list(set(sections)) if sections is not None else []
    self._base_path = base_path
    # Copy so later mutation of the attribute cannot affect the caller's
    # list (the original also copied; it additionally had a dead
    # `self.serialize_results_to = []` store, removed here).
    self.serialize_results_to = (
        list(serialize_results_to) if serialize_results_to is not None
        else [])
    self.oq_job_profile = oq_job_profile
    self.oq_job = oq_job
    # NOTE(review): this mutates the caller-supplied `params` dict;
    # kept as-is for backward compatibility.
    self.params['debug'] = log_level
    self._log_level = log_level
    self.force_inputs = force_inputs