def test_job_from_file(self):
    # make a hazard job
    haz_cfg = helpers.get_data_path('event_based_hazard/job.ini')
    haz_job = engine.job_from_file(haz_cfg, 'test_user')
    # make a fake Output
    out = models.Output.objects.create(
        oq_job=haz_job, display_name='fake', output_type='gmf')
    # make a risk job
    risk_cfg = helpers.get_data_path('event_based_risk/job.ini')
    with mock.patch.object(logs.LOG, 'warn') as warn:
        risk_job = engine.job_from_file(
            risk_cfg, 'another_user', hazard_output_id=out.id)
    # make sure a warning is printed because you are using a hazard
    # generated by a different user
    self.assertEqual(
        warn.call_args[0],
        ('You are using a hazard calculation ran by %s', 'test_user'))
    with mock.patch.object(logs.LOG, 'warn') as warn:
        risk_job = engine.job_from_file(
            risk_cfg, 'test_user', hazard_output_id=out.id,
            quantile_loss_curves='0.1 0.2')
    # make sure a warning is printed because you are using
    # quantile_loss_curves with a single hazard output
    self.assertEqual(
        warn.call_args[0][0],
        'quantile_loss_curves is on, but you passed a single hazard '
        'output: the statistics will not be computed')
    # make sure the hazard job is associated correctly
    self.assertEqual(risk_job.hazard_calculation.id, haz_job.id)
def run_job(cfg_file, log_level, log_file, exports='',
            hazard_calculation_id=None):
    """
    Run a job using the specified config file and other options.

    :param str cfg_file:
        Path to the calculation config (INI-style) file.
    :param str log_level:
        'debug', 'info', 'warn', 'error', or 'critical'
    :param str log_file:
        Path to the log file.
    :param exports:
        A comma-separated string of export types requested by the user.
    :param hazard_calculation_id:
        ID of the previous calculation, or None
    """
    # if the master dies, automatically kill the workers
    concurrent_futures_process_monkeypatch()
    job_ini = os.path.abspath(cfg_file)
    job_id, oqparam = eng.job_from_file(
        job_ini, getpass.getuser(), hazard_calculation_id)
    calc = eng.run_calc(job_id, oqparam, log_level, log_file, exports,
                        hazard_calculation_id=hazard_calculation_id)
    calc.monitor.flush()
    for line in logs.dbcmd('list_outputs', job_id, False):
        print(line)
    return job_id
def submit_job(job_file, temp_dir, dbname, callback_url=None,
               foreign_calc_id=None, hazard_output_id=None,
               hazard_calculation_id=None, logfile=None):
    """
    Create a job object from the given job.ini file in the job directory
    and submit it to the job queue.
    """
    try:
        job = oq_engine.job_from_file(
            job_file, "platform", DEFAULT_LOG_LEVEL, [],
            hazard_output_id, hazard_calculation_id)
    except Exception:  # catch errors in the job creation phase
        etype, exc, tb = sys.exc_info()
        einfo = "".join(traceback.format_tb(tb))
        einfo += '%s: %s' % (etype.__name__, exc)
        tasks.update_calculation(callback_url, status="failed", einfo=einfo)
        raise
    calc = job.calculation
    job_type = 'risk' if job.calculation is job.risk_calculation else 'hazard'
    future = executor.submit(
        tasks.run_calc, job_type, calc.id, temp_dir,
        callback_url, foreign_calc_id, dbname, logfile)
    return job, future
def run_job(job_ini, log_level='info', log_file=None, exports='',
            username=getpass.getuser(), **kw):
    """
    Run a job using the specified config file and other options.

    :param str job_ini:
        Path to the calculation config (INI-style) file.
    :param str log_level:
        'debug', 'info', 'warn', 'error', or 'critical'
    :param str log_file:
        Path to the log file.
    :param exports:
        A comma-separated string of export types requested by the user.
    :param username:
        Name of the user running the job
    :param kw:
        Extra parameters like hazard_calculation_id and calculation_mode
    """
    job_id = logs.init('job', getattr(logging, log_level.upper()))
    with logs.handle(job_id, log_level, log_file):
        job_ini = os.path.abspath(job_ini)
        oqparam = eng.job_from_file(job_ini, job_id, username, **kw)
        kw['username'] = username
        eng.run_calc(job_id, oqparam, exports, **kw)
        for line in logs.dbcmd('list_outputs', job_id, False):
            safeprint(line)
    return job_id
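A minimal usage sketch for the run_job variant above; the import path and the job.ini location are assumptions made for illustration, not taken from the snippet.

# Hypothetical usage: run a calculation and print its job id.
# `my_engine_wrapper` and '/tmp/demo/job.ini' are placeholders.
from my_engine_wrapper import run_job

job_id = run_job('/tmp/demo/job.ini', log_level='info', exports='csv')
print('calculation finished, job id =', job_id)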
def test_job_from_file(self):
    # make a hazard job
    haz_cfg = helpers.get_data_path('event_based_hazard/job.ini')
    haz_job = engine.job_from_file(haz_cfg, 'test_user')
    # make a fake Output
    out = models.Output.objects.create(
        oq_job=haz_job, display_name='fake', output_type='gmf')
    # make a risk job
    risk_cfg = helpers.get_data_path('event_based_risk/job.ini')
    risk_job = engine.job_from_file(
        risk_cfg, 'test_user', hazard_output_id=out.id)
    # make sure the hazard job is associated correctly
    oqjob = risk_job.risk_calculation.hazard_calculation
    self.assertEqual(oqjob.id, haz_job.id)
def get_job(cfg, username="******", hazard_calculation_id=None,
            hazard_output_id=None, **extras):
    """
    Given a path to a config file and a hazard_calculation_id (or,
    alternatively, a hazard_output_id), create a
    :class:`openquake.engine.db.models.OqJob` object for a risk calculation.
    """
    if hazard_calculation_id is None and hazard_output_id is None:
        return engine.job_from_file(cfg, username, 'error', [], **extras)
    job = engine.prepare_job(username)
    oqparam = readini.parse_config(
        open(cfg), hazard_calculation_id, hazard_output_id)
    params = vars(oqparam)
    if hazard_calculation_id is None:
        params['hazard_calculation_id'] = models.Output.objects.get(
            pk=hazard_output_id).oq_job.id
    # we are removing intensity_measure_types_and_levels because it is not
    # a field of RiskCalculation; this ugliness will disappear when
    # RiskCalculation is removed
    del params['intensity_measure_types_and_levels']
    job.save_params(params)
    risk_calc = engine.create_calculation(models.RiskCalculation, params)
    risk_calc = models.RiskCalculation.objects.get(id=risk_calc.id)
    job.risk_calculation = risk_calc
    job.save()
    return job
def run_job(cfg_file, log_level='info', log_file=None, exports='',
            hazard_calculation_id=None, **kw):
    """
    Run a job using the specified config file and other options.

    :param str cfg_file:
        Path to the calculation config (INI-style) file.
    :param str log_level:
        'debug', 'info', 'warn', 'error', or 'critical'
    :param str log_file:
        Path to the log file.
    :param exports:
        A comma-separated string of export types requested by the user.
    :param hazard_calculation_id:
        ID of the previous calculation, or None
    """
    job_ini = os.path.abspath(cfg_file)
    job_id, oqparam = eng.job_from_file(
        job_ini, getpass.getuser(), hazard_calculation_id)
    calc = eng.run_calc(job_id, oqparam, log_level, log_file, exports,
                        hazard_calculation_id=hazard_calculation_id, **kw)
    calc._monitor.flush()
    for line in logs.dbcmd('list_outputs', job_id, False):
        safeprint(line)
    return job_id
def submit_job(job_ini, user_name, hazard_job_id=None,
               loglevel=DEFAULT_LOG_LEVEL, logfile=None, exports=''):
    """
    Create a job object from the given job.ini file in the job directory
    and submit it to the job queue. Returns the job ID and a Future.
    """
    job_id, oqparam = engine.job_from_file(job_ini, user_name, hazard_job_id)
    fut = executor.submit(engine.run_calc, job_id, oqparam, loglevel,
                          logfile, exports, hazard_job_id)
    return job_id, fut
def run_job(cfg, exports='xml,csv', hazard_calculation_id=None, **params):
    """
    Given the path to a job config file and a hazard_calculation_id or
    an output, run the job.

    :returns: a calculator object
    """
    job_id, oqparam = engine.job_from_file(
        cfg, 'openquake', 'error', [], hazard_calculation_id, **params)
    logfile = os.path.join(tempfile.gettempdir(), 'qatest.log')
    return engine.run_calc(job_id, oqparam, 'error', logfile, exports)
def get_job(cfg, username="******", hazard_calculation_id=None,
            hazard_output_id=None, **extras):
    """
    Given a path to a config file and a hazard_calculation_id (or,
    alternatively, a hazard_output_id), create a
    :class:`openquake.engine.db.models.OqJob` object for a risk calculation.
    """
    if hazard_output_id and not hazard_calculation_id:
        hazard_calculation_id = models.Output.objects.get(
            pk=hazard_output_id).oq_job.id
    return engine.job_from_file(
        cfg, username, 'error', [],
        hazard_calculation_id=hazard_calculation_id,
        hazard_output_id=hazard_output_id, **extras)
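A hedged usage sketch for the get_job helper above; the config path, the output id and the import location are all hypothetical.

# Hypothetical usage of get_job; `qa_utils` and the id 42 are placeholders.
from qa_utils import get_job

# no hazard ids: the job is built from the config file alone
job = get_job('event_based_risk/job.ini')
# only a hazard_output_id: the owning job id is looked up and forwarded
# as hazard_calculation_id
job = get_job('event_based_risk/job.ini', hazard_output_id=42)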
def submit_job(job_ini, user_name, hazard_job_id=None):
    """
    Create a job object from the given job.ini file in the job directory
    and run it in a new process. Returns the job ID and PID.
    """
    job_id, oq = engine.job_from_file(job_ini, user_name, hazard_job_id)
    pik = pickle.dumps(oq, protocol=0)  # human readable protocol
    code = RUNCALC % dict(job_id=job_id, hazard_job_id=hazard_job_id,
                          pik=pik)
    tmp_py = writetmp(code, suffix='.py')
    # print(code, tmp_py)  # useful when debugging
    devnull = getattr(subprocess, 'DEVNULL', None)  # defined in Python 3
    popen = subprocess.Popen([sys.executable, tmp_py],
                             stdin=devnull, stdout=devnull, stderr=devnull)
    threading.Thread(target=popen.wait).start()
    return job_id, popen.pid
def test(self):
    # check that if risk models are provided, then the sites
    # and the imls are taken from there
    cfg = helpers.get_data_path('classical_job-sd-imt.ini')
    job = engine.job_from_file(cfg, helpers.default_user())
    job.is_running = True
    job.save()
    calc = calculators(job)
    calc.parse_risk_model()
    self.assertEqual(['PGA'], list(calc.oqparam.imtls))
    self.assertEqual(3, calc.job.exposuremodel.exposuredata_set.count())
    return job
def submit_job(job_ini, username, hazard_job_id=None):
    """
    Create a job object from the given job.ini file in the job directory
    and run it in a new process. Returns the job ID and PID.
    """
    job_id, oq = engine.job_from_file(job_ini, username, hazard_job_id)
    pik = pickle.dumps(oq, protocol=0)  # human readable protocol
    code = RUNCALC % dict(job_id=job_id, hazard_job_id=hazard_job_id,
                          pik=pik, username=username)
    tmp_py = gettemp(code, suffix='.py')
    # print(code, tmp_py)  # useful when debugging
    devnull = subprocess.DEVNULL
    popen = subprocess.Popen([sys.executable, tmp_py],
                             stdin=devnull, stdout=devnull, stderr=devnull)
    threading.Thread(target=popen.wait).start()
    logs.dbcmd('update_job', job_id, {'pid': popen.pid})
    return job_id, popen.pid
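The human-readable pickle (protocol 0) lets the OqParam object be embedded directly in the generated script, and the watcher thread reaps the detached child so submit_job can return immediately. A hedged usage sketch, with a hypothetical import path and job.ini location:

# Hypothetical usage of submit_job; `server_utils` is a placeholder.
from server_utils import submit_job

job_id, pid = submit_job('/tmp/demo/job.ini', 'michele')
print('job %d is running in process %d' % (job_id, pid))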
def test(self):
    cfg = helpers.get_data_path('event_based_hazard/job.ini')
    job = engine.job_from_file(cfg, 'test_user')
    with tempfile.NamedTemporaryFile() as temp:
        with self.assertRaises(ZeroDivisionError), mock.patch(
                'openquake.engine.engine._do_run_calc', lambda *args: 1/0
        ), mock.patch('openquake.engine.engine.cleanup_after_job',
                      lambda job: None):
            engine.run_calc(job, 'info', temp.name, exports=[])
        logged = open(temp.name).read()
        # make sure the real error has been logged
        self.assertIn('integer division or modulo by zero', logged)
        # also check the spurious cleanup error
        self.assertIn('TypeError: <lambda>() got an unexpected keyword '
                      "argument 'terminate'", logged)
def get_job(cfg, username="******", hazard_calculation_id=None,
            hazard_output_id=None):
    """
    Given a path to a config file and a hazard_calculation_id (or,
    alternatively, a hazard_output_id), create a
    :class:`openquake.engine.db.models.OqJob` object for a risk calculation.
    """
    if hazard_calculation_id is None and hazard_output_id is None:
        return engine.job_from_file(cfg, username, 'error', [])
    job = engine.prepare_job(username)
    params = vars(readini.parse_config(
        open(cfg), hazard_calculation_id, hazard_output_id))
    risk_calc = engine.create_calculation(models.RiskCalculation, params)
    risk_calc = models.RiskCalculation.objects.get(id=risk_calc.id)
    job.risk_calculation = risk_calc
    job.save()
    return job
def test(self):
    # check that if risk models are provided, then the sites
    # and the imls are taken from there
    cfg = helpers.get_data_path('classical_job-sd-imt.ini')
    job = engine.job_from_file(cfg, helpers.default_user())
    job.is_running = True
    job.save()
    haz_calc = job.get_oqparam()
    calc = get_calculator_class('hazard', haz_calc.calculation_mode)(job)
    calc.parse_risk_models()
    self.assertEqual(
        ['PGA'], list(calc.hc.intensity_measure_types_and_levels))
    self.assertEqual(3, calc.job.exposuremodel.exposuredata_set.count())
    return job
def submit_job(job_ini, username, hazard_job_id=None):
    """
    Create a job object from the given job.ini file in the job directory
    and run it in a new process. Returns the job ID and PID.
    """
    job_id = logs.init('job')
    oq = engine.job_from_file(
        job_ini, job_id, username, hazard_calculation_id=hazard_job_id)
    pik = pickle.dumps(oq, protocol=0)  # human readable protocol
    code = RUNCALC % dict(job_id=job_id, hazard_job_id=hazard_job_id,
                          pik=pik, username=username)
    tmp_py = gettemp(code, suffix='.py')
    # print(code, tmp_py)  # useful when debugging
    devnull = subprocess.DEVNULL
    popen = subprocess.Popen([sys.executable, tmp_py],
                             stdin=devnull, stdout=devnull, stderr=devnull)
    threading.Thread(target=popen.wait).start()
    logs.dbcmd('update_job', job_id, {'pid': popen.pid})
    return job_id, popen.pid
def pre_execute(job_ini):
    """
    Run a hazard calculation, but stop it immediately after the
    pre_execute phase. In this way it is possible to determine the
    input_weight and output_weight of the calculation without running it.
    """
    job = job_from_file(job_ini, getpass.getuser(), 'info', [])
    calc_mode = job.hazard_calculation.calculation_mode
    calculator = get_calculator_class('hazard', calc_mode)(job)
    handler = LogStreamHandler(job)
    logging.root.addHandler(handler)
    logs.set_level('info')
    t0 = time.time()
    try:
        calculator.pre_execute()
    finally:
        duration = time.time() - t0
        logs.LOG.info('Pre_execute time: %s s', duration)
        logging.root.removeHandler(handler)
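A hedged usage sketch for pre_execute above; the job.ini path is hypothetical. The timing logged in the finally block gives a cheap estimate of the setup cost without paying for the full calculation:

# Hypothetical usage: measure only the pre_execute phase of a demo job.
pre_execute('/tmp/demo/job_hazard.ini')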
def run_jobs(job_inis, log_level='info', log_file=None, exports='',
             username=getpass.getuser(), **kw):
    """
    Run jobs using the specified config files and other options.

    :param job_inis:
        A list of paths to .ini files.
    :param str log_level:
        'debug', 'info', 'warn', 'error', or 'critical'
    :param str log_file:
        Path to the log file.
    :param exports:
        A comma-separated string of export types requested by the user.
    :param username:
        Name of the user running the jobs
    :param kw:
        Extra parameters like hazard_calculation_id and calculation_mode
    """
    dist = parallel.oq_distribute()
    jobparams = []
    for job_ini in job_inis:
        # NB: the logs must be initialized BEFORE everything
        job_id = logs.init('job', getattr(logging, log_level.upper()))
        with logs.handle(job_id, log_level, log_file):
            oqparam = eng.job_from_file(os.path.abspath(job_ini), job_id,
                                        username, **kw)
        if (not jobparams and 'csm_cache' not in kw and
                'hazard_calculation_id' not in kw):
            kw['hazard_calculation_id'] = job_id
        jobparams.append((job_id, oqparam))
    jobarray = len(jobparams) > 1 and 'csm_cache' in kw
    try:
        eng.poll_queue(job_id, poll_time=15)
        # wait for an empty slot or a CTRL-C
    except BaseException:
        # the job aborted even before starting
        for job_id, oqparam in jobparams:
            logs.dbcmd('finish', job_id, 'aborted')
        return jobparams
    else:
        for job_id, oqparam in jobparams:
            dic = {'status': 'executing', 'pid': eng._PID}
            if jobarray:
                dic['hazard_calculation_id'] = jobparams[0][0]
            logs.dbcmd('update_job', job_id, dic)
    try:
        if dist == 'zmq' and config.zworkers['host_cores']:
            logging.info('Asking the DbServer to start the workers')
            logs.dbcmd('zmq_start')  # start the zworkers
            logs.dbcmd('zmq_wait')  # wait for them to go up
        allargs = [(job_id, oqparam, exports, log_level, log_file)
                   for job_id, oqparam in jobparams]
        if jobarray:
            with start_many(eng.run_calc, allargs):
                pass
        else:
            for args in allargs:
                eng.run_calc(*args)
    finally:
        if dist == 'zmq' and config.zworkers['host_cores']:
            logging.info('Stopping the zworkers')
            logs.dbcmd('zmq_stop')
        elif dist.startswith('celery'):
            eng.celery_cleanup(
                config.distribution.terminate_workers_on_revoke)
    return jobparams
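A hedged usage sketch for run_jobs above; the paths are hypothetical. Because the first job id is recorded as hazard_calculation_id when none is passed explicitly, a hazard .ini followed by a risk .ini runs the risk calculation against the hazard just computed:

# Hypothetical usage: chain a hazard job and a risk job in one call.
jobparams = run_jobs(['/tmp/demo/job_hazard.ini', '/tmp/demo/job_risk.ini'],
                     log_level='info', exports='csv')
for job_id, oqparam in jobparams:
    print(job_id, oqparam.calculation_mode)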