def test_get_with_empty_section_data(self):
    """config.get() returns `None` when the section data dict is empty."""
    with patch('openquake.engine.config.get_section') as mock:
        mock.return_value = {}
        self.assertIsNone(config.get("whatever", "key"))
        self.assertEqual(1, mock.call_count)
        self.assertEqual([("whatever",), {}], mock.call_args)
def test_get_with_unknown_key(self):
    # config.get() returns `None` if the `key` is not known
    with patch('openquake.engine.config.get_section') as mock:
        mock.return_value = {'b': 1}
        self.assertIsNone(config.get("arghh", "c"))
        self.assertEqual(1, mock.call_count)
        self.assertEqual([("arghh",), {}], mock.call_args)
def test_get_with_empty_section_data(self):
    # an empty section data dict must make config.get() return None
    with patch('openquake.engine.config.get_section') as mock_gs:
        mock_gs.return_value = {}
        self.assertTrue(config.get("whatever", "key") is None)
        self.assertEqual(mock_gs.call_count, 1)
        self.assertEqual(mock_gs.call_args, [("whatever",), {}])
def dbserver(cmd):
    """
    start/stop/restart the database server, or return its status
    """
    # managing the server by hand only makes sense in single user mode
    if valid.boolean(config.get('dbserver', 'multi_user')):
        sys.exit('oq dbserver only works in single user mode')
    state = get_status()
    if cmd == 'status':
        print(state)
    elif cmd == 'stop':
        if state != 'running':
            print('already stopped')
        else:
            logs.dbcmd('stop')
            print('stopped')
    elif cmd == 'start':
        if state != 'not-running':
            print('already running')
        else:
            runserver()
            print('started')
    elif cmd == 'restart':
        # stop first if needed, then start in any case
        if state == 'running':
            logs.dbcmd('stop')
            print('stopped')
        runserver()
        print('started')
def test_get_with_unknown_key(self):
    """config.get() returns `None` if the `key` is not known."""
    with patch('openquake.engine.config.get_section') as mock_gs:
        mock_gs.return_value = {'b': 1}
        self.assertTrue(config.get("arghh", "c") is None)
        self.assertEqual(mock_gs.call_count, 1)
        self.assertEqual(mock_gs.call_args, [("arghh",), {}])
def test_get_with_nonempty_section_data_and_known_key(self):
    """config.get() returns the configuration datum for known keys."""
    with patch('openquake.engine.config.get_section') as mock:
        mock.return_value = {'a': 11}
        self.assertEqual(config.get("hmmm", "a"), 11)
        self.assertEqual(mock.call_count, 1)
        self.assertEqual([("hmmm",), {}], mock.call_args)
def test_get_with_nonempty_section_data_and_known_key(self):
    # for a known section/key pair config.get() yields the stored datum
    with patch('openquake.engine.config.get_section') as mock_gs:
        mock_gs.return_value = {'a': 11}
        self.assertEqual(11, config.get("hmmm", "a"))
        self.assertEqual(1, mock_gs.call_count)
        self.assertEqual(mock_gs.call_args, [("hmmm",), {}])
def webui(cmd, hostport='127.0.0.1:8800'):
    """
    start the webui server in foreground or perform other operation on the
    django application
    """
    # make sure a DbServer is reachable before touching django
    if get_status() == 'not-running':
        if valid.boolean(config.get('dbserver', 'multi_user')):
            # in multi user mode the DbServer is managed externally
            sys.exit('Please start the DbServer: '
                     'see the documentation for details')
        dbserver.runserver()
    if cmd == 'start':
        rundjango('runserver', hostport)
    elif cmd == 'syncdb':
        rundjango('syncdb')
def ensure_on():
    """
    Start the DbServer if it is off
    """
    if get_status() != 'not-running':
        return  # already up, nothing to do
    if valid.boolean(config.get('dbserver', 'multi_user')):
        # in multi user mode an administrator must start it
        sys.exit('Please start the DbServer: '
                 'see the documentation for details')
    # single user mode: spawn the DbServer in a subprocess
    subprocess.Popen([sys.executable, '-m', 'openquake.server.dbserver',
                      '-l', 'INFO'])
    # poll once per second until the server answers, up to 10 seconds
    attempts = 10
    while get_status() == 'not-running':
        if attempts == 0:
            sys.exit('The DbServer cannot be started after 10 seconds. '
                     'Please check the configuration')
        time.sleep(1)
        attempts -= 1
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>. """Engine: A collection of fundamental functions for initializing and running calculations.""" import sys import signal import traceback from openquake.baselib.performance import Monitor from openquake.commonlib import valid, parallel from openquake.commonlib.oqvalidation import OqParam from openquake.calculators import base from openquake.engine import logs, config TERMINATE = valid.boolean( config.get('celery', 'terminate_workers_on_revoke') or 'false') USE_CELERY = valid.boolean(config.get('celery', 'use_celery') or 'false') if USE_CELERY: import celery.task.control def set_concurrent_tasks_default(): """ Set the default for concurrent_tasks to twice the number of workers. Returns the number of live celery nodes (i.e. the number of machines). """ stats = celery.task.control.inspect(timeout=1).stats() if not stats: sys.exit("No live compute nodes, aborting calculation") num_cores = sum(stats[k]['pool']['max-concurrency'] for k in stats)
# (at your option) any later version. # # OpenQuake is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with OpenQuake. If not, see <http://www.gnu.org/licenses/>. import os from openquake.baselib.performance import Monitor from openquake.risklib import valid from openquake.engine import config SOFT_MEM_LIMIT = int(config.get('memory', 'soft_mem_limit')) HARD_MEM_LIMIT = int(config.get('memory', 'hard_mem_limit')) USE_CELERY = valid.boolean(config.get('celery', 'use_celery') or 'false') if USE_CELERY: os.environ['OQ_DISTRIBUTE'] = 'celery' # NB: this import must go AFTER the setting of OQ_DISTRIBUTE from openquake.commonlib import parallel parallel.check_mem_usage.__defaults__ = ( Monitor(), SOFT_MEM_LIMIT, HARD_MEM_LIMIT) def confirm(prompt): """
# # You should have received a copy of the GNU Affero General Public License # along with OpenQuake. If not, see <http://www.gnu.org/licenses/>. import os import sys import importlib from openquake.baselib import sap from openquake.commonlib import __version__ from openquake import commands from openquake.risklib import valid from openquake.engine import config USE_CELERY = valid.boolean(config.get('celery', 'use_celery') or 'false') # the environment variable has the precedence over the configuration file if 'OQ_DISTRIBUTE' not in os.environ and USE_CELERY: os.environ['OQ_DISTRIBUTE'] = 'celery' # force cluster users to use `oq engine` so that we have centralized logs if USE_CELERY and 'run' in sys.argv: sys.exit('You are on a cluster and you are using oq run?? ' 'Use oq engine --run instead!') def oq(): modnames = ['openquake.commands.%s' % mod[:-3] for mod in os.listdir(commands.__path__[0]) if mod.endswith('.py') and not mod.startswith('_')] for modname in modnames:
import os import sys import signal import traceback from openquake.baselib.performance import Monitor from openquake.risklib import valid from openquake.commonlib import parallel, readinput from openquake.commonlib.oqvalidation import OqParam from openquake.commonlib import export, datastore from openquake.calculators import base, views from openquake.engine import logs, config TERMINATE = valid.boolean( config.get('celery', 'terminate_workers_on_revoke') or 'false') USE_CELERY = valid.boolean(config.get('celery', 'use_celery') or 'false') if USE_CELERY: import celery.task.control def set_concurrent_tasks_default(): """ Set the default for concurrent_tasks. Returns the number of live celery nodes (i.e. the number of machines). """ stats = celery.task.control.inspect(timeout=1).stats() if not stats: sys.exit("No live compute nodes, aborting calculation") num_cores = sum(stats[k]['pool']['max-concurrency'] for k in stats)
def engine(log_file, no_distribute, yes, config_file, make_html_report,
           upgrade_db, version_db, what_if_I_upgrade,
           run_hazard, run_risk, run,
           list_hazard_calculations, list_risk_calculations,
           delete_calculation, delete_uncompleted_calculations,
           hazard_calculation_id, list_outputs, show_log, export_output,
           export_outputs, exports='', log_level='info'):
    """
    Run a calculation using the traditional command line API.

    Exactly one of the action flags (run, run_hazard, run_risk,
    upgrade_db, list_*, delete_*, export_*, ...) is expected per
    invocation; the big if/elif chain below dispatches on them.
    """
    config.abort_if_no_config_available()
    if run or run_hazard or run_risk:
        # the logging will be configured in engine.py
        pass
    else:
        # configure a basic logging
        logging.basicConfig(level=logging.INFO)
    if config_file:
        os.environ[config.OQ_CONFIG_FILE_VAR] = os.path.abspath(
            os.path.expanduser(config_file))
        config.refresh()
    if no_distribute:
        os.environ['OQ_DISTRIBUTE'] = 'no'
    # check if the datadir exists
    if not os.path.exists(datastore.DATADIR):
        os.makedirs(datastore.DATADIR)
    # check if the DbServer is up by probing its address
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        err = sock.connect_ex(config.DBS_ADDRESS)
    finally:
        sock.close()
    if err:
        multi_user = valid.boolean(config.get('dbserver', 'multi_user'))
        if multi_user:
            sys.exit('Please start the DbServer: '
                     'see the documentation for details')
        # otherwise start the DbServer automatically
        subprocess.Popen([sys.executable, '-m', 'openquake.server.dbserver',
                          '-l', 'INFO'])
    if upgrade_db:
        logs.set_level('info')
        msg = logs.dbcmd('what_if_I_upgrade', 'read_scripts')
        if msg.startswith('Your database is already updated'):
            pass
        elif yes or confirm('Proceed? (y/n) '):
            logs.dbcmd('upgrade_db')
        sys.exit(0)
    if version_db:
        print(logs.dbcmd('version_db'))
        sys.exit(0)
    if what_if_I_upgrade:
        print(logs.dbcmd('what_if_I_upgrade', 'extract_upgrade_scripts'))
        sys.exit(0)
    # check if the db is outdated
    outdated = logs.dbcmd('check_outdated')
    if outdated:
        sys.exit(outdated)
    # hazard or hazard+risk
    if hazard_calculation_id:
        hc_id = get_job_id(hazard_calculation_id)
    else:
        hc_id = None
    if run:
        # NB: a list comprehension, NOT map(): in Python 3 map() returns
        # a lazy iterator without len(), breaking the check below
        job_inis = [os.path.expanduser(ini) for ini in run.split(',')]
        if len(job_inis) not in (1, 2):
            sys.exit('%s should be a .ini filename or a pair of filenames '
                     'separated by a comma' % run)
        for job_ini in job_inis:
            # raise an IOError if the file does not exist; `with` makes
            # sure the file handle is not leaked
            with open(job_ini) as ini:
                ini.read()
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        if len(job_inis) == 2:
            # run hazard
            job_id = run_job(job_inis[0], log_level, log_file, exports)
            # run risk
            run_job(job_inis[1], log_level, log_file, exports,
                    hazard_calculation_id=job_id)
        else:
            run_job(
                os.path.expanduser(run), log_level, log_file,
                exports, hazard_calculation_id=hc_id)
    # hazard
    elif list_hazard_calculations:
        for line in logs.dbcmd(
                'list_calculations', 'hazard', getpass.getuser()):
            print(line)
    elif run_hazard is not None:
        print('WARN: --rh/--run-hazard are deprecated, use --run instead',
              file=sys.stderr)
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        run_job(os.path.expanduser(run_hazard), log_level,
                log_file, exports)
    elif delete_calculation is not None:
        # NOTE(review): the parameter `delete_calculation` (a calculation
        # id) shadows the module-level deletion helper, so this call
        # invokes the id itself and raises TypeError. The intended
        # module-level function name is not visible in this chunk — confirm
        # and call that helper instead.
        delete_calculation(delete_calculation, yes)
    # risk
    elif list_risk_calculations:
        for line in logs.dbcmd('list_calculations', 'risk',
                               getpass.getuser()):
            print(line)
    elif run_risk is not None:
        print('WARN: --rr/--run-risk are deprecated, use --run instead',
              file=sys.stderr)
        if hazard_calculation_id is None:
            sys.exit(MISSING_HAZARD_MSG)
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        run_job(
            os.path.expanduser(run_risk), log_level, log_file, exports,
            hazard_calculation_id=hc_id)
    # export
    elif make_html_report:
        print('Written %s' % make_report(make_html_report))
        sys.exit(0)
    elif list_outputs is not None:
        hc_id = get_job_id(list_outputs)
        for line in logs.dbcmd('list_outputs', hc_id):
            print(line)
    elif show_log is not None:
        hc_id = get_job_id(show_log)
        for line in logs.dbcmd('get_log', hc_id):
            print(line)
    elif export_output is not None:
        output_id, target_dir = export_output
        dskey, calc_id, datadir = logs.dbcmd('get_output', int(output_id))
        for line in core.export_output(
                dskey, calc_id, datadir, os.path.expanduser(target_dir),
                exports):
            print(line)
    elif export_outputs is not None:
        job_id, target_dir = export_outputs
        hc_id = get_job_id(job_id)
        for line in core.export_outputs(
                hc_id, os.path.expanduser(target_dir), exports):
            print(line)
    elif delete_uncompleted_calculations:
        logs.dbcmd('delete_uncompleted_calculations', getpass.getuser())
    else:
        parser.parentparser.prog = 'oq engine'
        parser.parentparser.print_usage()
from __future__ import print_function import io import os import getpass import logging from openquake.hazardlib.calc.hazard_curve import zero_curves from openquake.baselib import sap from openquake.risklib import scientific, valid from openquake.commonlib import datastore from openquake.commonlib.writers import write_csv from openquake.commonlib.util import rmsep from openquake.engine import config, logs from openquake.calculators.views import view MULTI_USER = valid.boolean(config.get('dbserver', 'multi_user') or 'false') if MULTI_USER: # get the datastore of the user who ran the job def read(calc_id): job = logs.dbcmd('get_job', calc_id, getpass.getuser()) datadir = os.path.dirname(job.ds_calc_dir) return datastore.read(job.id, datadir=datadir) else: # get the datastore of the current user read = datastore.read def get_hcurves_and_means(dstore): """ Extract hcurves from the datastore and compute their means. :returns: curves_by_rlz, mean_curves
# (at your option) any later version. # # OpenQuake is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with OpenQuake. If not, see <http://www.gnu.org/licenses/>. import os from openquake.baselib.performance import Monitor from openquake.risklib import valid from openquake.engine import config SOFT_MEM_LIMIT = int(config.get('memory', 'soft_mem_limit')) HARD_MEM_LIMIT = int(config.get('memory', 'hard_mem_limit')) USE_CELERY = valid.boolean(config.get('celery', 'use_celery') or 'false') if USE_CELERY: os.environ['OQ_DISTRIBUTE'] = 'celery' # NB: this import must go AFTER the setting of OQ_DISTRIBUTE from openquake.commonlib import parallel parallel.check_mem_usage.__defaults__ = (Monitor(), SOFT_MEM_LIMIT, HARD_MEM_LIMIT) def confirm(prompt): """
# by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # OpenQuake is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with OpenQuake. If not, see <http://www.gnu.org/licenses/>. from openquake.baselib.performance import Monitor from openquake.commonlib import parallel from openquake.engine import config SOFT_MEM_LIMIT = int(config.get('memory', 'soft_mem_limit')) HARD_MEM_LIMIT = int(config.get('memory', 'hard_mem_limit')) parallel.check_mem_usage.__defaults__ = ( Monitor(), SOFT_MEM_LIMIT, HARD_MEM_LIMIT) def confirm(prompt): """ Ask for confirmation, given a ``prompt`` and return a boolean value. """ while True: try: answer = raw_input(prompt) except KeyboardInterrupt: # the user presses ctrl+c, just say 'no'
def main():
    """Command line entry point for the oq-engine script (Python 2)."""
    arg_parser = set_up_arg_parser()
    args = arg_parser.parse_args()
    exports = args.exports or 'xml,csv'  # default export formats
    if args.version:
        print openquake.engine.__version__
        sys.exit(0)
    if args.run or args.run_hazard or args.run_risk:
        # the logging will be configured in engine.py
        pass
    else:
        # configure a basic logging
        logging.basicConfig(level=logging.INFO)
    if args.config_file:
        # point the engine at a user-supplied configuration file
        os.environ[config.OQ_CONFIG_FILE_VAR] = \
            abspath(expanduser(args.config_file))
        config.refresh()
    if args.no_distribute:
        os.environ['OQ_DISTRIBUTE'] = 'no'
    # check if the DbServer is up by probing its address
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        err = sock.connect_ex(config.DBS_ADDRESS)
    finally:
        sock.close()
    if err:
        multi_user = valid.boolean(config.get('dbserver', 'multi_user'))
        if multi_user:
            # in multi user mode an administrator must start the server
            sys.exit('Please start the DbServer: '
                     'see the documentation for details')
        # otherwise start the DbServer automatically, logging to a file
        dblog = os.path.expanduser('~/oq-dbserver.log')
        subprocess.Popen([sys.executable, '-m', 'openquake.server.dbserver',
                          '-l', 'INFO'], stderr=open(dblog, 'w'))
    if args.upgrade_db:
        logs.set_level('info')
        msg = logs.dbcmd('what_if_I_upgrade', 'read_scripts')
        if msg.startswith('Your database is already updated'):
            pass
        elif args.yes or confirm('Proceed? (y/n) '):
            logs.dbcmd('upgrade_db')
        sys.exit(0)
    if args.version_db:
        print logs.dbcmd('version_db')
        sys.exit(0)
    if args.what_if_I_upgrade:
        print logs.dbcmd('what_if_I_upgrade', 'extract_upgrade_scripts')
        sys.exit(0)
    # check if the db is outdated
    outdated = logs.dbcmd('check_outdated')
    if outdated:
        sys.exit(outdated)
    # hazard or hazard+risk
    if args.hazard_calculation_id:
        hc_id = get_job_id(args.hazard_calculation_id)
    else:
        hc_id = None
    if args.run:
        # NB: Python 2 map() returns a list, so len() below is fine
        job_inis = map(expanduser, args.run.split(','))
        if len(job_inis) not in (1, 2):
            sys.exit('%s should be a .ini filename or a pair of filenames '
                     'separated by a comma' % args.run)
        for job_ini in job_inis:
            open(job_ini).read()  # raise an IOError if the file does not exist
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        if len(job_inis) == 2:
            # run hazard
            job_id = run_job(job_inis[0], args.log_level, log_file,
                             args.exports)
            # run risk, chained to the hazard job just completed
            run_job(job_inis[1], args.log_level, log_file, args.exports,
                    hazard_calculation_id=job_id)
        else:
            run_job(
                expanduser(args.run), args.log_level, log_file,
                args.exports, hazard_calculation_id=hc_id)
    # hazard
    elif args.list_hazard_calculations:
        for line in logs.dbcmd(
                'list_calculations', 'hazard', getpass.getuser()):
            print line
    elif args.run_hazard is not None:
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        run_job(expanduser(args.run_hazard), args.log_level,
                log_file, args.exports)
    elif args.delete_calculation is not None:
        delete_calculation(args.delete_calculation, args.yes)
    # risk
    elif args.list_risk_calculations:
        for line in logs.dbcmd('list_calculations', 'risk',
                               getpass.getuser()):
            print line
    elif args.run_risk is not None:
        # a risk calculation requires a pre-existing hazard calculation
        if args.hazard_calculation_id is None:
            sys.exit(MISSING_HAZARD_MSG)
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        run_job(
            expanduser(args.run_risk), args.log_level, log_file,
            args.exports, hazard_calculation_id=hc_id)
    # export
    elif args.make_html_report:
        print 'Written', make_report(args.make_html_report)
        sys.exit(0)
    elif args.list_outputs is not None:
        hc_id = get_job_id(args.list_outputs)
        for line in logs.dbcmd('list_outputs', hc_id):
            print line
    elif args.show_view is not None:
        job_id, view_name = args.show_view
        print views.view(view_name, datastore.read(int(job_id)))
    elif args.show_log is not None:
        hc_id = get_job_id(args.show_log[0])
        for line in logs.dbcmd('get_log', hc_id):
            print line
    elif args.export_output is not None:
        output_id, target_dir = args.export_output
        dskey, calc_id, datadir = logs.dbcmd('get_output', int(output_id))
        for line in core.export_output(
                dskey, calc_id, datadir, expanduser(target_dir), exports):
            print line
    elif args.export_outputs is not None:
        job_id, target_dir = args.export_outputs
        hc_id = get_job_id(job_id)
        for line in core.export_outputs(
                hc_id, expanduser(target_dir), exports):
            print line
    elif args.delete_uncompleted_calculations:
        logs.dbcmd('delete_uncompleted_calculations', getpass.getuser())
    else:
        # no recognized action flag was given: show the usage message
        arg_parser.print_usage()