def get_calcs(request_get_dict, user_name, user_acl_on=False, id=None):
    """
    Query the oq-engine database for job/calculation records, optionally
    restricted by the GET parameters.

    :returns: list of tuples (job_id, user_name, job_status, job_type,
                              job_is_running, job_description)
    """
    queryset = models.OqJob.objects.filter()
    # restrict to the calculations owned by the user when the ACL is on
    # (settings.ACL_ON = True) or the user is a Django super user
    if user_acl_on:
        queryset = queryset.filter(user_name=user_name)
    if id is not None:
        queryset = queryset.filter(id=id)
    params = request_get_dict
    if 'job_type' in params:
        # hazard jobs are the ones with a NULL hazard_calculation
        queryset = queryset.filter(
            hazard_calculation__isnull=params.get('job_type') == 'hazard')
    if 'is_running' in params:
        queryset = queryset.filter(
            is_running=valid.boolean(params.get('is_running')))
    if 'relevant' in params:
        queryset = queryset.filter(
            relevant=valid.boolean(params.get('relevant')))
    return [(j.id, j.user_name, j.status, j.job_type, j.is_running,
             j.description) for j in queryset.order_by('-id')]
def get_calcs(request_get_dict, user_name, user_acl_on=False, id=None):
    """
    Extract job+calculation data from the oq-engine database.

    :returns: list of tuples (job_id, user_name, job_status, job_type,
                              job_is_running, job_description)
    """
    # start from the full job table and narrow it down step by step
    job_qs = models.OqJob.objects.filter()
    if user_acl_on:
        # user_acl_on is true if settings.ACL_ON = True or when the user
        # is a Django super user
        job_qs = job_qs.filter(user_name=user_name)
    if id is not None:
        job_qs = job_qs.filter(id=id)
    if 'job_type' in request_get_dict:
        wanted = request_get_dict.get('job_type')
        job_qs = job_qs.filter(hazard_calculation__isnull=wanted == 'hazard')
    if 'is_running' in request_get_dict:
        flag = valid.boolean(request_get_dict.get('is_running'))
        job_qs = job_qs.filter(is_running=flag)
    if 'relevant' in request_get_dict:
        flag = valid.boolean(request_get_dict.get('relevant'))
        job_qs = job_qs.filter(relevant=flag)
    rows = []
    for job in job_qs.order_by('-id'):
        rows.append((job.id, job.user_name, job.status, job.job_type,
                     job.is_running, job.description))
    return rows
def get_calcs(db, request_get_dict, user_name, user_acl_on=False, id=None):
    """
    Return job+calculation data from the oq-engine database.

    :param db: a :class:`openquake.server.dbapi.Db` instance
    :param request_get_dict: a dictionary (typically request.GET)
    :param user_name: user name
    :param user_acl_on: if True, returns only the calculations owned by
        the user
    :param id: if given, extract only the specified calculation
    :returns: list of tuples (job_id, user_name, job_status, job_type,
                              job_is_running, job_description)
    :raises ValueError: if limit/start_time GET parameters are malformed
    """
    import re  # local import to keep this fix self-contained
    # filterdict is rendered via the ?A placeholder, so its values are
    # passed safely to the db layer
    filterdict = {}
    # user_acl_on is true if settings.ACL_ON = True or when the user is a
    # Django super user
    if user_acl_on:
        filterdict['user_name'] = user_name
    if id is not None:
        filterdict['id'] = id
    if 'job_type' in request_get_dict:
        filterdict['job_type'] = request_get_dict.get('job_type')
    if 'is_running' in request_get_dict:
        is_running = request_get_dict.get('is_running')
        filterdict['is_running'] = valid.boolean(is_running)
    if 'relevant' in request_get_dict:
        relevant = request_get_dict.get('relevant')
        filterdict['relevant'] = valid.boolean(relevant)
    if 'limit' in request_get_dict:
        # int() both converts and rejects non-numeric input
        limit = int(request_get_dict.get('limit'))
    else:
        limit = 100
    if 'start_time' in request_get_dict:
        start = request_get_dict.get('start_time')  # assume an ISO string
        # `start` is interpolated into the SQL below; since it comes from
        # an untrusted GET parameter, accept only a strict ISO
        # date/datetime to prevent SQL injection
        if not re.match(r'^\d{4}-\d{2}-\d{2}([ T][\d:.]*)?$', start):
            raise ValueError('Invalid start_time: %r' % start)
    else:
        # consider only calculations younger than 1 month
        # ISO string with format YYYY-MM-DD
        start = (datetime.today() - timedelta(30)).isoformat()[:10]
    time_filter = "start_time >= '%s'" % start
    jobs = db('SELECT *, %s FROM job WHERE ?A AND %s ORDER BY id DESC LIMIT %d'
              % (JOB_TYPE, time_filter, limit), filterdict)
    return [(job.id, job.user_name, job.status, job.job_type,
             job.is_running, job.description) for job in jobs]
def dbserver(cmd):
    """
    start/stop/restart the database server, or return its status
    """
    # this command is meaningful only when a single user owns the server
    if valid.boolean(config.get('dbserver', 'multi_user')):
        sys.exit('oq dbserver only works in single user mode')
    status = get_status()

    def shutdown():
        # ask the running server to stop and report it
        logs.dbcmd('stop')
        print('stopped')

    if cmd == 'status':
        print(status)
    elif cmd == 'stop':
        if status == 'running':
            shutdown()
        else:
            print('already stopped')
    elif cmd == 'start':
        if status == 'not-running':
            runserver()
            print('started')
        else:
            print('already running')
    elif cmd == 'restart':
        # stop first (if up), then start again
        if status == 'running':
            shutdown()
        runserver()
        print('started')
def float_or_flag(value, isAbsolute=None):
    """
    Validate the attributes/tags insuranceLimit and deductible
    """
    # without the isAbsolute flag the value itself is the thing to validate
    if isAbsolute is None:
        # considering the insuranceLimit attribute
        return valid.positivefloat(value)
    # otherwise only the flag is validated
    return valid.boolean(isAbsolute)
def webui(cmd, hostport='127.0.0.1:8800'):
    """
    start the webui server in foreground or perform other operation on the
    django application
    """
    # a DbServer must be reachable before django can do anything useful
    if get_status() == 'not-running':
        multi_user = valid.boolean(config.get('dbserver', 'multi_user'))
        if multi_user:
            sys.exit('Please start the DbServer: '
                     'see the documentation for details')
        dbserver.runserver()
    # dispatch the supported django management commands
    commands = {'start': ('runserver', hostport), 'syncdb': ('syncdb',)}
    if cmd in commands:
        rundjango(*commands[cmd])
def ensure_on():
    """
    Start the DbServer if it is off
    """
    if get_status() != 'not-running':
        return  # already up, nothing to do
    if valid.boolean(config.get('dbserver', 'multi_user')):
        sys.exit('Please start the DbServer: '
                 'see the documentation for details')
    # single user mode: spawn the DbServer automatically
    subprocess.Popen([sys.executable, '-m', 'openquake.server.dbserver',
                      '-l', 'INFO'])
    # poll for up to 10 seconds, one check per second
    for _ in range(10):
        if get_status() != 'not-running':
            return
        time.sleep(1)
    if get_status() == 'not-running':
        sys.exit('The DbServer cannot be started after 10 seconds. '
                 'Please check the configuration')
# OpenQuake is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with OpenQuake. If not, see <http://www.gnu.org/licenses/>. import os from openquake.baselib.performance import Monitor from openquake.risklib import valid from openquake.engine import config SOFT_MEM_LIMIT = int(config.get('memory', 'soft_mem_limit')) HARD_MEM_LIMIT = int(config.get('memory', 'hard_mem_limit')) USE_CELERY = valid.boolean(config.get('celery', 'use_celery') or 'false') if USE_CELERY: os.environ['OQ_DISTRIBUTE'] = 'celery' # NB: this import must go AFTER the setting of OQ_DISTRIBUTE from openquake.commonlib import parallel parallel.check_mem_usage.__defaults__ = ( Monitor(), SOFT_MEM_LIMIT, HARD_MEM_LIMIT) def confirm(prompt): """ Ask for confirmation, given a ``prompt`` and return a boolean value. """
calculations.""" import os import sys import signal import traceback from openquake.baselib.performance import Monitor from openquake.risklib import valid from openquake.commonlib import parallel, readinput from openquake.commonlib.oqvalidation import OqParam from openquake.commonlib import export, datastore from openquake.calculators import base, views from openquake.engine import logs, config TERMINATE = valid.boolean( config.get('celery', 'terminate_workers_on_revoke') or 'false') USE_CELERY = valid.boolean(config.get('celery', 'use_celery') or 'false') if USE_CELERY: import celery.task.control def set_concurrent_tasks_default(): """ Set the default for concurrent_tasks. Returns the number of live celery nodes (i.e. the number of machines). """ stats = celery.task.control.inspect(timeout=1).stats() if not stats: sys.exit("No live compute nodes, aborting calculation") num_cores = sum(stats[k]['pool']['max-concurrency'] for k in stats)
def engine(log_file, no_distribute, yes, config_file, make_html_report,
           upgrade_db, version_db, what_if_I_upgrade,
           run_hazard, run_risk, run,
           list_hazard_calculations, list_risk_calculations,
           delete_calculation, delete_uncompleted_calculations,
           hazard_calculation_id, list_outputs, show_log, export_output,
           export_outputs, exports='', log_level='info'):
    """
    Run a calculation using the traditional command line API
    """
    config.abort_if_no_config_available()
    if run or run_hazard or run_risk:
        # the logging will be configured in engine.py
        pass
    else:
        # configure a basic logging
        logging.basicConfig(level=logging.INFO)
    if config_file:
        os.environ[config.OQ_CONFIG_FILE_VAR] = os.path.abspath(
            os.path.expanduser(config_file))
        config.refresh()
    if no_distribute:
        os.environ['OQ_DISTRIBUTE'] = 'no'
    # check if the datadir exists
    if not os.path.exists(datastore.DATADIR):
        os.makedirs(datastore.DATADIR)
    # check if the DbServer is up
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        err = sock.connect_ex(config.DBS_ADDRESS)
    finally:
        sock.close()
    if err:
        multi_user = valid.boolean(config.get('dbserver', 'multi_user'))
        if multi_user:
            sys.exit('Please start the DbServer: '
                     'see the documentation for details')
        # otherwise start the DbServer automatically
        subprocess.Popen([sys.executable, '-m', 'openquake.server.dbserver',
                          '-l', 'INFO'])
    if upgrade_db:
        logs.set_level('info')
        msg = logs.dbcmd('what_if_I_upgrade', 'read_scripts')
        if msg.startswith('Your database is already updated'):
            pass
        elif yes or confirm('Proceed? (y/n) '):
            logs.dbcmd('upgrade_db')
        sys.exit(0)
    if version_db:
        print(logs.dbcmd('version_db'))
        sys.exit(0)
    if what_if_I_upgrade:
        print(logs.dbcmd('what_if_I_upgrade', 'extract_upgrade_scripts'))
        sys.exit(0)
    # check if the db is outdated
    outdated = logs.dbcmd('check_outdated')
    if outdated:
        sys.exit(outdated)
    # hazard or hazard+risk
    if hazard_calculation_id:
        hc_id = get_job_id(hazard_calculation_id)
    else:
        hc_id = None
    if run:
        # FIX: materialize the map, otherwise len() fails on Python 3
        # where map returns an iterator
        job_inis = list(map(os.path.expanduser, run.split(',')))
        if len(job_inis) not in (1, 2):
            sys.exit('%s should be a .ini filename or a pair of filenames '
                     'separated by a comma' % run)
        for job_ini in job_inis:
            open(job_ini).read()  # raise an IOError if the file does not exist
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        if len(job_inis) == 2:
            # run hazard
            job_id = run_job(job_inis[0], log_level, log_file, exports)
            # run risk
            run_job(job_inis[1], log_level, log_file, exports,
                    hazard_calculation_id=job_id)
        else:
            run_job(
                os.path.expanduser(run), log_level, log_file, exports,
                hazard_calculation_id=hc_id)
    # hazard
    elif list_hazard_calculations:
        for line in logs.dbcmd(
                'list_calculations', 'hazard', getpass.getuser()):
            print(line)
    elif run_hazard is not None:
        print('WARN: --rh/--run-hazard are deprecated, use --run instead',
              file=sys.stderr)
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        run_job(os.path.expanduser(run_hazard), log_level,
                log_file, exports)
    elif delete_calculation is not None:
        # FIX: the original called `delete_calculation(...)`, i.e. the
        # integer parameter itself, since the parameter shadows the
        # module-level helper -> TypeError at runtime; call the helper
        # del_calculation instead
        # NOTE(review): assumes the module-level helper is named
        # del_calculation -- confirm against the rest of the file
        del_calculation(delete_calculation, yes)
    # risk
    elif list_risk_calculations:
        for line in logs.dbcmd('list_calculations', 'risk',
                               getpass.getuser()):
            print(line)
    elif run_risk is not None:
        print('WARN: --rr/--run-risk are deprecated, use --run instead',
              file=sys.stderr)
        if hazard_calculation_id is None:
            sys.exit(MISSING_HAZARD_MSG)
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        run_job(
            os.path.expanduser(run_risk), log_level, log_file, exports,
            hazard_calculation_id=hc_id)
    # export
    elif make_html_report:
        print('Written %s' % make_report(make_html_report))
        sys.exit(0)
    elif list_outputs is not None:
        hc_id = get_job_id(list_outputs)
        for line in logs.dbcmd('list_outputs', hc_id):
            print(line)
    elif show_log is not None:
        hc_id = get_job_id(show_log)
        for line in logs.dbcmd('get_log', hc_id):
            print(line)
    elif export_output is not None:
        output_id, target_dir = export_output
        dskey, calc_id, datadir = logs.dbcmd('get_output', int(output_id))
        for line in core.export_output(
                dskey, calc_id, datadir, os.path.expanduser(target_dir),
                exports):
            print(line)
    elif export_outputs is not None:
        job_id, target_dir = export_outputs
        hc_id = get_job_id(job_id)
        for line in core.export_outputs(
                hc_id, os.path.expanduser(target_dir), exports):
            print(line)
    elif delete_uncompleted_calculations:
        logs.dbcmd('delete_uncompleted_calculations', getpass.getuser())
    else:
        parser.parentparser.prog = 'oq engine'
        parser.parentparser.print_usage()
from __future__ import print_function
import io
import os
import getpass
import logging
from openquake.hazardlib.calc.hazard_curve import zero_curves
from openquake.baselib import sap
from openquake.risklib import scientific, valid
from openquake.commonlib import datastore
from openquake.commonlib.writers import write_csv
from openquake.commonlib.util import rmsep
from openquake.engine import config, logs
from openquake.calculators.views import view

# boolean flag from the [dbserver] section; defaults to false when unset
MULTI_USER = valid.boolean(config.get('dbserver', 'multi_user') or 'false')
if MULTI_USER:
    # get the datastore of the user who ran the job
    def read(calc_id):
        # look up the job owned by the current system user and open its
        # datastore from the directory recorded in the database
        job = logs.dbcmd('get_job', calc_id, getpass.getuser())
        datadir = os.path.dirname(job.ds_calc_dir)
        return datastore.read(job.id, datadir=datadir)
else:
    # get the datastore of the current user
    read = datastore.read


def get_hcurves_and_means(dstore):
    """
    Extract hcurves from the datastore and compute their means.

    :returns: curves_by_rlz, mean_curves
# OpenQuake is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with OpenQuake. If not, see <http://www.gnu.org/licenses/>. import os from openquake.baselib.performance import Monitor from openquake.risklib import valid from openquake.engine import config SOFT_MEM_LIMIT = int(config.get('memory', 'soft_mem_limit')) HARD_MEM_LIMIT = int(config.get('memory', 'hard_mem_limit')) USE_CELERY = valid.boolean(config.get('celery', 'use_celery') or 'false') if USE_CELERY: os.environ['OQ_DISTRIBUTE'] = 'celery' # NB: this import must go AFTER the setting of OQ_DISTRIBUTE from openquake.commonlib import parallel parallel.check_mem_usage.__defaults__ = (Monitor(), SOFT_MEM_LIMIT, HARD_MEM_LIMIT) def confirm(prompt): """ Ask for confirmation, given a ``prompt`` and return a boolean value. """
def main():
    """
    Command line entry point: parse the arguments and run the requested
    engine operation (run a calculation, list/delete calculations,
    upgrade the db, export outputs, ...).
    """
    arg_parser = set_up_arg_parser()
    args = arg_parser.parse_args()
    exports = args.exports or 'xml,csv'
    if args.version:
        print openquake.engine.__version__
        sys.exit(0)
    if args.run or args.run_hazard or args.run_risk:
        # the logging will be configured in engine.py
        pass
    else:
        # configure a basic logging
        logging.basicConfig(level=logging.INFO)
    if args.config_file:
        os.environ[config.OQ_CONFIG_FILE_VAR] = \
            abspath(expanduser(args.config_file))
        config.refresh()
    if args.no_distribute:
        os.environ['OQ_DISTRIBUTE'] = 'no'
    # check if the DbServer is up by trying to connect to its address
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        err = sock.connect_ex(config.DBS_ADDRESS)
    finally:
        sock.close()
    if err:
        multi_user = valid.boolean(config.get('dbserver', 'multi_user'))
        if multi_user:
            sys.exit('Please start the DbServer: '
                     'see the documentation for details')
        # otherwise start the DbServer automatically, logging to ~/oq-dbserver.log
        dblog = os.path.expanduser('~/oq-dbserver.log')
        subprocess.Popen([sys.executable, '-m', 'openquake.server.dbserver',
                          '-l', 'INFO'], stderr=open(dblog, 'w'))
    if args.upgrade_db:
        logs.set_level('info')
        msg = logs.dbcmd('what_if_I_upgrade', 'read_scripts')
        if msg.startswith('Your database is already updated'):
            pass
        elif args.yes or confirm('Proceed? (y/n) '):
            logs.dbcmd('upgrade_db')
        sys.exit(0)
    if args.version_db:
        print logs.dbcmd('version_db')
        sys.exit(0)
    if args.what_if_I_upgrade:
        print logs.dbcmd('what_if_I_upgrade', 'extract_upgrade_scripts')
        sys.exit(0)
    # check if the db is outdated
    outdated = logs.dbcmd('check_outdated')
    if outdated:
        sys.exit(outdated)
    # hazard or hazard+risk
    if args.hazard_calculation_id:
        hc_id = get_job_id(args.hazard_calculation_id)
    else:
        hc_id = None
    if args.run:
        # NB: map returns a list here (Python 2 code, see the bare
        # print statements), so len() below is fine
        job_inis = map(expanduser, args.run.split(','))
        if len(job_inis) not in (1, 2):
            sys.exit('%s should be a .ini filename or a pair of filenames '
                     'separated by a comma' % args.run)
        for job_ini in job_inis:
            open(job_ini).read()  # raise an IOError if the file does not exist
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        if len(job_inis) == 2:
            # run hazard
            job_id = run_job(job_inis[0], args.log_level, log_file,
                             args.exports)
            # run risk
            run_job(job_inis[1], args.log_level, log_file, args.exports,
                    hazard_calculation_id=job_id)
        else:
            run_job(
                expanduser(args.run), args.log_level, log_file,
                args.exports, hazard_calculation_id=hc_id)
    # hazard
    elif args.list_hazard_calculations:
        for line in logs.dbcmd(
                'list_calculations', 'hazard', getpass.getuser()):
            print line
    elif args.run_hazard is not None:
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        run_job(expanduser(args.run_hazard), args.log_level,
                log_file, args.exports)
    elif args.delete_calculation is not None:
        delete_calculation(args.delete_calculation, args.yes)
    # risk
    elif args.list_risk_calculations:
        for line in logs.dbcmd('list_calculations', 'risk',
                               getpass.getuser()):
            print line
    elif args.run_risk is not None:
        if args.hazard_calculation_id is None:
            sys.exit(MISSING_HAZARD_MSG)
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        run_job(
            expanduser(args.run_risk), args.log_level, log_file,
            args.exports, hazard_calculation_id=hc_id)
    # export
    elif args.make_html_report:
        print 'Written', make_report(args.make_html_report)
        sys.exit(0)
    elif args.list_outputs is not None:
        hc_id = get_job_id(args.list_outputs)
        for line in logs.dbcmd('list_outputs', hc_id):
            print line
    elif args.show_view is not None:
        job_id, view_name = args.show_view
        print views.view(view_name, datastore.read(int(job_id)))
    elif args.show_log is not None:
        hc_id = get_job_id(args.show_log[0])
        for line in logs.dbcmd('get_log', hc_id):
            print line
    elif args.export_output is not None:
        output_id, target_dir = args.export_output
        dskey, calc_id, datadir = logs.dbcmd('get_output', int(output_id))
        for line in core.export_output(
                dskey, calc_id, datadir, expanduser(target_dir), exports):
            print line
    elif args.export_outputs is not None:
        job_id, target_dir = args.export_outputs
        hc_id = get_job_id(job_id)
        for line in core.export_outputs(
                hc_id, expanduser(target_dir), exports):
            print line
    elif args.delete_uncompleted_calculations:
        logs.dbcmd('delete_uncompleted_calculations', getpass.getuser())
    else:
        arg_parser.print_usage()
calculations.""" import os import sys import signal import traceback from openquake.baselib.performance import Monitor from openquake.risklib import valid from openquake.commonlib import parallel, readinput from openquake.commonlib.oqvalidation import OqParam from openquake.commonlib import export, datastore, views from openquake.calculators import base from openquake.engine import logs, config TERMINATE = valid.boolean( config.get('celery', 'terminate_workers_on_revoke') or 'false') USE_CELERY = valid.boolean(config.get('celery', 'use_celery') or 'false') if USE_CELERY: import celery.task.control def set_concurrent_tasks_default(): """ Set the default for concurrent_tasks to twice the number of workers. Returns the number of live celery nodes (i.e. the number of machines). """ stats = celery.task.control.inspect(timeout=1).stats() if not stats: sys.exit("No live compute nodes, aborting calculation") num_cores = sum(stats[k]['pool']['max-concurrency'] for k in stats)