def setUp(self):
    """Build a single-site scenario damage job.

    Creates a classic job with fragility/exposure inputs, cleans the
    KVS for that job id, stores one block containing a single site,
    exposure data and ground motion values, and finally instantiates
    the calculator under test with its preprocessing steps stubbed out.
    """
    inputs = [("fragility", ""), ("exposure", "")]
    self.job = self.setup_classic_job(inputs=inputs)

    # start from a clean KVS slate for this job id
    kvs.mark_job_as_current(self.job.id)
    kvs.cache_gc(self.job.id)

    self.site = Site(1.0, 1.0)
    block = Block(self.job.id, BLOCK_ID, [self.site])
    block.to_kvs()

    # this region contains a single site, that is exactly
    # a site with longitude == 1.0 and latitude == 1.0
    params = {"REGION_VERTEX": "1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0",
              "REGION_GRID_SPACING": "0.5", "BASE_PATH": ".",
              "OUTPUT_DIR": "."}

    self.job_ctxt = JobContext(params, self.job.id, oq_job=self.job)

    self.em = self._store_em()
    self._store_gmvs([0.40, 0.30, 0.45, 0.35, 0.40])

    self.calculator = ScenarioDamageRiskCalculator(self.job_ctxt)

    # just stubbing out some preprocessing stuff...
    # The original assigned the stubs to *class* attributes and never
    # restored them, leaking the stubs into every other test using
    # ScenarioDamageRiskCalculator; addCleanup puts the real methods
    # back when this test finishes.
    for name in ("store_exposure_assets", "store_fragility_model",
                 "partition"):
        original = getattr(ScenarioDamageRiskCalculator, name)
        setattr(ScenarioDamageRiskCalculator, name, lambda self: None)
        self.addCleanup(
            setattr, ScenarioDamageRiskCalculator, name, original)
def __init__(self, params, calculation_id, sections=(), base_path=None,
             serialize_results_to=(), oq_job_profile=None,
             oq_calculation=None):
    """
    :param dict params: Dict of job config params.
    :param int calculation_id:
        ID of the corresponding oq_calculation db record.
    :param list sections: List of config file sections. Example::

        ['HAZARD', 'RISK']

    :param str base_path: base directory containing job input files
    :param oq_job_profile:
        :class:`openquake.db.models.OqJobProfile` instance; database
        representation of the job profile / calculation configuration.
    :param oq_calculation:
        :class:`openquake.db.models.OqCalculation` instance; database
        representation of the runtime thing we refer to as the
        'calculation'.
    """
    # NOTE: the mutable ``list()`` default arguments were replaced with
    # immutable tuples; both parameters are only ever copied, so this
    # is behaviorally identical and avoids the shared-default pitfall.
    self._calculation_id = calculation_id
    mark_job_as_current(calculation_id)  # enables KVS gc

    self.sites = []
    self.blocks_keys = []
    self.params = params
    # de-duplicate the section names while keeping a list interface
    self.sections = list(set(sections))
    self._base_path = base_path
    # copy to avoid aliasing the caller's list (the original assigned
    # an empty list here that was immediately overwritten — removed)
    self.serialize_results_to = list(serialize_results_to)
    self.oq_job_profile = oq_job_profile
    self.oq_calculation = oq_calculation
def test_get_current_job_ids(self):
    """
    Given the test data, make sure that
    :py:function:`bin.cache_gc._get_current_job_ids` returns the
    correct IDs.
    """
    # register three jobs; each call adds a job key to CURRENT_JOBS
    expected = [1, 2, 3]
    for job_id in expected:
        kvs.mark_job_as_current(job_id)

    self.assertEqual(expected, cache_gc._get_current_job_ids())
def __init__(
    self,
    params,
    job_id,
    sections=(),
    base_path=None,
    serialize_results_to=(),
    oq_job_profile=None,
    oq_job=None,
    log_level="warn",
    force_inputs=False,
):
    """
    :param dict params: Dict of job config params.
    :param int job_id:
        ID of the corresponding oq_job db record.
    :param list sections: List of config file sections. Example::

        ['HAZARD', 'RISK']

    :param str base_path: base directory containing job input files
    :param oq_job_profile:
        :class:`openquake.db.models.OqJobProfile` instance; database
        representation of the job profile / calculation configuration.
    :param oq_job:
        :class:`openquake.db.models.OqJob` instance; database
        representation of the runtime thing we refer to as the
        'calculation'.
    :param str log_level:
        One of 'debug', 'info', 'warn', 'error', 'critical'.
        Defaults to 'warn'.
    :param bool force_inputs: If `True` the model input files will be
        parsed and the resulting content written to the database no
        matter what.
    """
    # NOTE: the mutable ``list()`` default arguments were replaced with
    # immutable tuples; both parameters are only ever copied, so this
    # is behaviorally identical and avoids the shared-default pitfall.
    self._job_id = job_id
    mark_job_as_current(job_id)  # enables KVS gc

    self.sites = []
    self.blocks_keys = []
    self.params = params
    # de-duplicate the section names while keeping a list interface
    self.sections = list(set(sections))
    self._base_path = base_path
    # copy to avoid aliasing the caller's list (the original assigned
    # an empty list here that was immediately overwritten — removed)
    self.serialize_results_to = list(serialize_results_to)
    self.oq_job_profile = oq_job_profile
    self.oq_job = oq_job
    self.params["debug"] = log_level
    self._log_level = log_level
    self.force_inputs = force_inputs
def __init__(self, params, job_id, sections=(), base_path=None,
             serialize_results_to=(), oq_job_profile=None,
             oq_job=None, log_level='warn', force_inputs=False):
    """
    :param dict params: Dict of job config params.
    :param int job_id:
        ID of the corresponding oq_job db record.
    :param list sections: List of config file sections. Example::

        ['HAZARD', 'RISK']

    :param str base_path: base directory containing job input files
    :param oq_job_profile:
        :class:`openquake.db.models.OqJobProfile` instance; database
        representation of the job profile / calculation configuration.
    :param oq_job:
        :class:`openquake.db.models.OqJob` instance; database
        representation of the runtime thing we refer to as the
        'calculation'.
    :param str log_level:
        One of 'debug', 'info', 'warn', 'error', 'critical'.
        Defaults to 'warn'.
    :param bool force_inputs: If `True` the model input files will be
        parsed and the resulting content written to the database no
        matter what.
    """
    # NOTE: the mutable ``list()`` default arguments were replaced with
    # immutable tuples; both parameters are only ever copied, so this
    # is behaviorally identical and avoids the shared-default pitfall.
    self._job_id = job_id
    mark_job_as_current(job_id)  # enables KVS gc

    self.sites = []
    self.blocks_keys = []
    self.params = params
    # de-duplicate the section names while keeping a list interface
    self.sections = list(set(sections))
    self._base_path = base_path
    # copy to avoid aliasing the caller's list (the original assigned
    # an empty list here that was immediately overwritten — removed)
    self.serialize_results_to = list(serialize_results_to)
    self.oq_job_profile = oq_job_profile
    self.oq_job = oq_job
    self.params['debug'] = log_level
    self._log_level = log_level
    self.force_inputs = force_inputs
def test_mark_job_as_current(self):
    """
    Test the generation of job keys using
    :py:function:`openquake.kvs.mark_job_as_current`.
    """
    job_ids = (1, 2)
    for job_id in job_ids:
        kvs.mark_job_as_current(job_id)

    # every marked id must now be a member of the CURRENT_JOBS set
    for job_id in job_ids:
        self.assertTrue(
            self.client.sismember(kvs.tokens.CURRENT_JOBS, job_id))
def test_mark_job_as_current(self):
    """
    Test the generation of job keys using
    :py:function:`openquake.kvs.mark_job_as_current`.
    """
    first_job, second_job = 1, 2
    kvs.mark_job_as_current(first_job)
    kvs.mark_job_as_current(second_job)

    # verify that both job ids were added to the CURRENT_JOBS set
    def is_current(job_id):
        return self.client.sismember(kvs.tokens.CURRENT_JOBS, job_id)

    self.assertTrue(is_current(first_job))
    self.assertTrue(is_current(second_job))
def _run_calc(job, log_level, log_file, exports, calc, job_type): """ Run a calculation. :param job: :class:`openquake.db.model.OqJob` instance which references a valid :class:`openquake.db.models.RiskCalculation` or :class:`openquake.db.models.HazardCalculation`. :param str log_level: The desired logging level. Valid choices are 'debug', 'info', 'progress', 'warn', 'error', and 'critical'. :param str log_file: Complete path (including file name) to file where logs will be written. If `None`, logging will just be printed to standard output. :param list exports: A (potentially empty) list of export targets. Currently only "xml" is supported. :param calc: Calculator object, which must implement the interface of :class:`openquake.calculators.base.CalculatorNext`. :param str job_type: 'hazard' or 'risk' """ # Closing all db connections to make sure they're not shared between # supervisor and job executor processes. # Otherwise, if one of them closes the connection it immediately becomes # unavailable for others. close_connection() job_pid = os.fork() if not job_pid: # calculation executor process try: logs.init_logs_amqp_send(level=log_level, job_id=job.id) # run the job job.is_running = True job.save() kvs.mark_job_as_current(job.id) _do_run_calc(job, exports, calc, job_type) except Exception, ex: logs.LOG.critical("Calculation failed with exception: '%s'" % str(ex)) raise finally:
def __init__(self, params, job_id, sections=(), base_path=None,
             serialize_results_to=()):
    """
    :param dict params: Dict of job config params.
    :param int job_id:
        ID of the corresponding oq_job db record.
    :param list sections: List of config file sections. Example::

        ['HAZARD', 'RISK']

    :param str base_path: base directory containing job input files
    """
    # NOTE: the mutable ``list()`` default arguments were replaced with
    # immutable tuples; both parameters are only ever copied, so this
    # is behaviorally identical and avoids the shared-default pitfall.
    self._job_id = job_id
    mark_job_as_current(job_id)  # enables KVS gc

    self.sites = []
    self.blocks_keys = []
    self.params = params
    # de-duplicate the section names while keeping a list interface
    self.sections = list(set(sections))
    self.base_path = base_path
    # copy to avoid aliasing the caller's list (the original assigned
    # an empty list here that was immediately overwritten — removed)
    self.serialize_results_to = list(serialize_results_to)
def setUp(self):
    """Flush the KVS, then register two current jobs: one with fake
    GMF/vulnerability data and one with no data at all."""
    self.client = kvs.get_client()
    self.client.flushdb()

    self.test_job = 1
    kvs.mark_job_as_current(self.test_job)

    # keys that will hold the fake data for test_job
    self.gmf1_key = kvs.tokens.gmf_set_key(self.test_job, 0, 0)
    self.gmf2_key = kvs.tokens.gmf_set_key(self.test_job, 0, 1)
    self.vuln_key = kvs.tokens.vuln_key(self.test_job)

    # now write the fake data for test_job
    fake_data = [(self.gmf1_key, 'fake gmf data 1'),
                 (self.gmf2_key, 'fake gmf data 2'),
                 (self.vuln_key, 'fake vuln curve data')]
    for key, value in fake_data:
        self.client.set(key, value)

    # this job will have no data
    self.dataless_job = 2
    kvs.mark_job_as_current(self.dataless_job)
def run_hazard(job, log_level, log_file, exports): """Run a hazard job. :param job: :class:`openquake.db.models.OqJob` instance which references a valid :class:`openquake.db.models.HazardCalculation`. :param list exports: a (potentially empty) list of export targets, currently only "xml" is supported """ # Closing all db connections to make sure they're not shared between # supervisor and job executor processes. # Otherwise, if one of them closes the connection it immediately becomes # unavailable for others. close_connection() job_pid = os.fork() if not job_pid: # calculation executor process try: logs.init_logs_amqp_send(level=log_level, job_id=job.id) # record initial job stats hc = job.hazard_calculation models.JobStats.objects.create( oq_job=job, num_sites=len(hc.points_to_compute()), realizations=hc.number_of_logic_tree_samples) # run the job job.is_running = True job.save() kvs.mark_job_as_current(job.id) _do_run_hazard(job, exports) except Exception, ex: logs.LOG.critical("Calculation failed with exception: '%s'" % str(ex)) raise finally:
def __init__(self, params, job_id, sections=list(), base_path=None,
             validator=None):
    """
    :param dict params: Dict of job config params.
    :param int job_id: ID of the corresponding oq_job db record.
    :param list sections: List of config file sections. Example::

        ['HAZARD', 'RISK']

    :param str base_path: base directory containing job input files
    :param validator: validator(s) used to check the configuration file
    """
    self._job_id = job_id
    # registering the id enables KVS garbage collection for this job
    mark_job_as_current(job_id)
    # NOTE(review): reads ``self.job_id`` (not ``self._job_id``) —
    # presumably a property defined elsewhere on this class; confirm.
    setup_job_logging(self.job_id)

    self.blocks_keys = []
    self.params = params
    self.sections = list(set(sections))  # de-duplicated section names
    self.serialize_results_to = []
    self.base_path = base_path
    self.validator = validator

    # persist the configuration only when a base path was supplied
    if base_path:
        self.to_kvs()
def tearDown(self):
    """Garbage-collect every KVS entry belonging to this test's job."""
    job_id = self.job_id
    kvs.mark_job_as_current(job_id)
    kvs.cache_gc(job_id)
def setUp(self):
    """Start from a clean KVS state for this test's job id."""
    job_id = self.job_id
    kvs.mark_job_as_current(job_id)
    kvs.cache_gc(job_id)