def run_calc(job, log_level, log_file, exports, job_type):
    """
    Run a calculation.

    :param job:
        :class:`openquake.engine.db.model.OqJob` instance which references a
        valid :class:`openquake.engine.db.models.RiskCalculation` or
        :class:`openquake.engine.db.models.HazardCalculation`.
    :param str log_level:
        The desired logging level. Valid choices are 'debug', 'info',
        'progress', 'warn', 'error', and 'critical'.
    :param str log_file:
        Complete path (including file name) to file where logs will be
        written. If `None`, logging will just be printed to standard output.
    :param list exports:
        A (potentially empty) list of export targets. Currently only "xml"
        is supported.
    :param str job_type:
        'hazard' or 'risk'
    """
    # pick the calculator class matching the job's calculation mode
    calc_mode = getattr(job, '%s_calculation' % job_type).calculation_mode
    calculator = get_calculator_class(job_type, calc_mode)(job)
    calc = job.calculation

    # attach a file handler when a log file was requested,
    # otherwise log to the standard output stream
    if log_file:
        handler = LogFileHandler(job_type, calc, log_file)
    else:
        handler = LogStreamHandler(job_type, calc)
    logging.root.addHandler(handler)
    try:
        with job_stats(job):  # run the job
            logs.set_level(log_level)
            _do_run_calc(job, exports, calculator, job_type)
    finally:
        # always detach the handler, even when the calculation fails
        logging.root.removeHandler(handler)
    return job
def wrapped(*args): """ Initialize logs, make sure the job is still running, and run the task code surrounded by a try-except. If any error occurs, log it as a critical failure. """ # job_id is always assumed to be the first argument job_id = args[0] job = models.OqJob.objects.get(id=job_id) if job.is_running is False: # the job was killed, it is useless to run the task raise JobNotRunning(job_id) # it is important to save the task id soon, so that # the revoke functionality can work EnginePerformanceMonitor.store_task_id(job_id, tsk) with EnginePerformanceMonitor( 'total ' + task_func.__name__, job_id, tsk, flush=True): # tasks write on the celery log file logs.set_level(job.log_level) check_mem_usage() # log a warning if too much memory is used try: # run the task return task_func(*args) finally: # save on the db CacheInserter.flushall() # the task finished, we can remove from the performance # table the associated row 'storing task id' models.Performance.objects.filter( oq_job=job, operation='storing task id', task_id=tsk.request.id).delete()
def pre_execute(job_ini):
    """
    Run a hazard calculation, but stops it immediately after the
    pre_execute phase. In this way it is possible to determine the
    input_weight and output_weight of the calculation without running it.
    """
    job = job_from_file(job_ini, getpass.getuser(), 'info', [])
    calc_mode = job.hazard_calculation.calculation_mode
    calculator = get_calculator_class('hazard', calc_mode)(job)

    # route log records to the standard output for the duration of the run
    stream_handler = LogStreamHandler(job)
    logging.root.addHandler(stream_handler)
    logs.set_level('info')

    start = time.time()
    try:
        calculator.pre_execute()
    finally:
        # report how long the pre_execute phase took and clean up logging
        elapsed = time.time() - start
        logs.LOG.info('Pre_execute time: %s s', elapsed)
        logging.root.removeHandler(stream_handler)
def engine(log_file, no_distribute, yes, config_file, make_html_report,
           upgrade_db, version_db, what_if_I_upgrade,
           run_hazard, run_risk, run,
           list_hazard_calculations, list_risk_calculations,
           delete_calculation, delete_uncompleted_calculations,
           hazard_calculation_id, list_outputs, show_log,
           export_output, export_outputs, exports='', log_level='info'):
    """
    Run a calculation using the traditional command line API

    The parameters are mutually exclusive command line options: exactly one
    action (run, list, delete, export, ...) is dispatched per invocation.
    """
    config.abort_if_no_config_available()

    if run or run_hazard or run_risk:
        # the logging will be configured in engine.py
        pass
    else:
        # configure a basic logging
        logging.basicConfig(level=logging.INFO)

    if config_file:
        # point the engine at a user-supplied configuration file
        os.environ[config.OQ_CONFIG_FILE_VAR] = os.path.abspath(
            os.path.expanduser(config_file))
        config.refresh()

    if no_distribute:
        os.environ['OQ_DISTRIBUTE'] = 'no'

    # check if the datadir exists
    if not os.path.exists(datastore.DATADIR):
        os.makedirs(datastore.DATADIR)

    dbserver.ensure_on()

    # database maintenance commands exit immediately after running
    if upgrade_db:
        logs.set_level('info')
        msg = logs.dbcmd('what_if_I_upgrade', 'read_scripts')
        if msg.startswith('Your database is already updated'):
            pass
        elif yes or confirm('Proceed? (y/n) '):
            logs.dbcmd('upgrade_db')
        sys.exit(0)

    if version_db:
        print(logs.dbcmd('version_db'))
        sys.exit(0)

    if what_if_I_upgrade:
        print(logs.dbcmd('what_if_I_upgrade', 'extract_upgrade_scripts'))
        sys.exit(0)

    # check if the db is outdated
    outdated = logs.dbcmd('check_outdated')
    if outdated:
        sys.exit(outdated)

    # hazard or hazard+risk
    if hazard_calculation_id:
        hc_id = get_job_id(hazard_calculation_id)
    else:
        hc_id = None
    if run:
        # `run` may be a single .ini or "hazard.ini,risk.ini"
        job_inis = [os.path.expanduser(ini) for ini in run.split(',')]
        if len(job_inis) not in (1, 2):
            sys.exit('%s should be a .ini filename or a pair of filenames '
                     'separated by a comma' % run)
        for job_ini in job_inis:
            open(job_ini).read()  # raise an IOError if the file does not exist
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None

        if len(job_inis) == 2:
            # run hazard
            job_id = run_job(job_inis[0], log_level,
                             log_file, exports)
            # run risk
            run_job(job_inis[1], log_level, log_file, exports,
                    hazard_calculation_id=job_id)
        else:
            run_job(
                os.path.expanduser(run), log_level, log_file,
                exports, hazard_calculation_id=hc_id)
    # hazard
    elif list_hazard_calculations:
        for line in logs.dbcmd(
                'list_calculations', 'hazard', getpass.getuser()):
            print(line)
    elif run_hazard is not None:
        print('WARN: --rh/--run-hazard are deprecated, use --run instead',
              file=sys.stderr)
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        run_job(os.path.expanduser(run_hazard), log_level,
                log_file, exports)
    elif delete_calculation is not None:
        del_calculation(delete_calculation, yes)
    # risk
    elif list_risk_calculations:
        for line in logs.dbcmd('list_calculations', 'risk', getpass.getuser()):
            print(line)
    elif run_risk is not None:
        print('WARN: --rr/--run-risk are deprecated, use --run instead',
              file=sys.stderr)
        if hazard_calculation_id is None:
            # a risk calculation needs a pre-existing hazard calculation
            sys.exit(MISSING_HAZARD_MSG)
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        run_job(
            os.path.expanduser(run_risk),
            log_level, log_file, exports,
            hazard_calculation_id=hc_id)
    # export
    elif make_html_report:
        print('Written %s' % make_report(make_html_report))
        sys.exit(0)
    elif list_outputs is not None:
        hc_id = get_job_id(list_outputs)
        for line in logs.dbcmd('list_outputs', hc_id):
            print(line)
    elif show_log is not None:
        hc_id = get_job_id(show_log)
        for line in logs.dbcmd('get_log', hc_id):
            print(line)
    elif export_output is not None:
        output_id, target_dir = export_output
        dskey, calc_id, datadir = logs.dbcmd('get_output', int(output_id))
        for line in core.export_output(
                dskey, calc_id, datadir, os.path.expanduser(target_dir),
                exports or 'xml,csv'):
            print(line)
    elif export_outputs is not None:
        job_id, target_dir = export_outputs
        hc_id = get_job_id(job_id)
        for line in core.export_outputs(
                hc_id, os.path.expanduser(target_dir), exports or 'xml,csv'):
            print(line)
    elif delete_uncompleted_calculations:
        logs.dbcmd('delete_uncompleted_calculations', getpass.getuser())
    else:
        # no action given: show the usage message
        engine.parentparser.prog = 'oq engine'
        engine.parentparser.print_usage()
def main():
    """
    Command line entry point (Python 2 syntax): parse the arguments and
    dispatch exactly one of the mutually exclusive actions (db maintenance,
    run, list, export, import, delete).
    """
    arg_parser = set_up_arg_parser()
    args = arg_parser.parse_args()

    if args.version:
        print __version__
        sys.exit(0)

    if args.config_file:
        # point the engine at a user-supplied configuration file
        os.environ[config.OQ_CONFIG_FILE_VAR] = \
            abspath(expanduser(args.config_file))
        config.refresh()

    if args.no_distribute:
        os.environ[openquake.engine.NO_DISTRIBUTE_VAR] = '1'

    # database maintenance commands exit immediately after running
    if args.upgrade_db:
        logging.basicConfig(level=logging.INFO)
        logs.set_level('info')
        conn = models.getcursor('admin').connection
        msg = upgrade_manager.what_if_I_upgrade(
            conn, extract_scripts='read_scripts')
        print msg
        if msg.startswith('Your database is already updated'):
            pass
        elif args.yes or confirm('Proceed? (y/n) '):
            upgrade_manager.upgrade_db(conn)
        sys.exit(0)

    if args.version_db:
        conn = models.getcursor('admin').connection
        print upgrade_manager.version_db(conn)
        sys.exit(0)

    if args.what_if_I_upgrade:
        conn = models.getcursor('admin').connection
        print upgrade_manager.what_if_I_upgrade(conn)
        sys.exit(0)

    if args.list_inputs:
        list_inputs(args.list_inputs)
    # hazard
    elif args.list_hazard_calculations:
        list_calculations(models.OqJob.objects)
    elif args.list_hazard_outputs is not None:
        engine.list_hazard_outputs(args.list_hazard_outputs)
    elif args.export_hazard is not None:
        output_id, target_dir = args.export_hazard
        output_id = int(output_id)
        export_hazard(output_id, expanduser(target_dir), args.export_type)
    elif args.export_hazard_outputs is not None:
        hc_id, target_dir = args.export_hazard_outputs
        export_hazard_outputs(int(hc_id), expanduser(target_dir),
                              args.export_type)
    elif args.run_hazard is not None:
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        engine.run_job(expanduser(args.run_hazard), args.log_level,
                       log_file, args.exports)
    elif args.delete_hazard_calculation is not None:
        del_haz_calc(args.delete_hazard_calculation, args.yes)
    # risk
    elif args.list_risk_calculations:
        list_calculations(models.RiskCalculation.objects)
    elif args.list_risk_outputs is not None:
        engine.list_risk_outputs(args.list_risk_outputs)
    elif args.export_risk is not None:
        output_id, target_dir = args.export_risk
        export_risk(output_id, expanduser(target_dir), args.export_type)
    elif args.export_risk_outputs is not None:
        rc_id, target_dir = args.export_risk_outputs
        export_risk_outputs(int(rc_id), expanduser(target_dir),
                            args.export_type)
    elif args.run_risk is not None:
        # a risk calculation needs a hazard output or calculation to start from
        if (args.hazard_output_id is None
                and args.hazard_calculation_id is None):
            sys.exit(MISSING_HAZARD_MSG)
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        engine.run_job(expanduser(args.run_risk), args.log_level, log_file,
                       args.exports,
                       hazard_output_id=args.hazard_output_id,
                       hazard_calculation_id=args.hazard_calculation_id)
    elif args.delete_risk_calculation is not None:
        del_risk_calc(args.delete_risk_calculation, args.yes)
    # import
    elif args.load_gmf is not None:
        with open(args.load_gmf) as f:
            out = import_gmf_scenario(f)
            print 'Added output id=%d of type %s; hazard_calculation_id=%d'\
                % (out.id, out.output_type, out.oq_job.id)
    elif args.load_curve is not None:
        with open(args.load_curve) as f:
            out = import_hazard_curves(f)
            print 'Added output id=%d of type %s; hazard_calculation_id=%d'\
                % (out.id, out.output_type, out.oq_job.id)
    elif args.list_imported_outputs:
        list_imported_outputs()
    elif args.delete_uncompleted_calculations:
        delete_uncompleted_calculations()
    elif args.save_hazard_calculation:
        save_hazards.main(*args.save_hazard_calculation)
    elif args.load_hazard_calculation:
        hc_ids = load_hazards.hazard_load(
            models.getcursor('admin').connection, args.load_hazard_calculation)
        print "Load hazard calculation with IDs: %s" % hc_ids
    else:
        # no action given: show the usage message
        arg_parser.print_usage()
def main():
    """
    Command line entry point (Python 2 syntax): parse the arguments and
    dispatch exactly one of the mutually exclusive actions. Supports the
    experimental --lite mode via `engine.run_job_lite`.
    """
    arg_parser = set_up_arg_parser()
    args = arg_parser.parse_args()
    exports = args.exports or 'xml,csv'

    if args.version:
        print __version__
        sys.exit(0)

    if args.run or args.run_hazard or args.run_risk:
        # the logging will be configured in engine.py
        pass
    else:
        # configure a basic logging
        logging.basicConfig(level=logging.INFO)

    if args.config_file:
        os.environ[config.OQ_CONFIG_FILE_VAR] = \
            abspath(expanduser(args.config_file))
        config.refresh()

    if args.no_distribute:
        os.environ[openquake.engine.NO_DISTRIBUTE_VAR] = '1'

    if args.make_html_report:
        conn = models.getcursor('admin').connection
        print 'Written', make_report(conn, args.make_html_report)
        sys.exit(0)

    # database maintenance commands exit immediately after running
    if args.upgrade_db:
        logs.set_level('info')
        conn = models.getcursor('admin').connection
        msg = upgrade_manager.what_if_I_upgrade(
            conn, extract_scripts='read_scripts')
        print msg
        if msg.startswith('Your database is already updated'):
            pass
        elif args.yes or confirm('Proceed? (y/n) '):
            upgrade_manager.upgrade_db(conn)
        sys.exit(0)

    if args.version_db:
        conn = models.getcursor('admin').connection
        print upgrade_manager.version_db(conn)
        sys.exit(0)

    if args.what_if_I_upgrade:
        conn = models.getcursor('admin').connection
        print upgrade_manager.what_if_I_upgrade(conn)
        sys.exit(0)

    # --lite switches to the lightweight runner; it does not support
    # --hazard-output-id
    run_job = engine.run_job_lite if args.lite else engine.run_job
    if args.lite and args.hazard_output_id:
        sys.exit('The --hazard-output-id option is not supported with the '
                 '--lite option')

    if args.list_inputs:
        list_inputs(args.list_inputs)

    # hazard or hazard+risk
    hc_id = args.hazard_calculation_id
    if hc_id and int(hc_id) < 0:
        # make it possible commands like `oq-engine --run job_risk.ini --hc -1`
        hc_id = get_hc_id(int(hc_id))

    if args.run:
        # `--run` may be a single .ini or "hazard.ini,risk.ini"
        job_inis = map(expanduser, args.run.split(','))
        if len(job_inis) not in (1, 2):
            sys.exit('%s should be a .ini filename or a pair of filenames '
                     'separated by a comma' % args.run)
        for job_ini in job_inis:
            open(job_ini).read()  # raise an IOError if the file does not exist
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        if len(job_inis) == 2:
            # run hazard
            job = run_job(job_inis[0], args.log_level,
                          log_file, args.exports)
            # run risk
            run_job(job_inis[1], args.log_level, log_file, args.exports,
                    hazard_calculation_id=job.id)
        else:
            run_job(
                expanduser(args.run), args.log_level, log_file,
                args.exports, hazard_output_id=args.hazard_output_id,
                hazard_calculation_id=hc_id)
    # hazard
    elif args.list_hazard_calculations:
        list_calculations('hazard')
    elif args.run_hazard is not None:
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        run_job(expanduser(args.run_hazard), args.log_level,
                log_file, args.exports)
    elif args.delete_hazard_calculation is not None:
        del_calc(args.delete_hazard_calculation, args.yes)
    # risk
    elif args.list_risk_calculations:
        list_calculations('risk')
    elif args.run_risk is not None:
        # a risk calculation needs a hazard output or calculation to start from
        if (args.hazard_output_id is None
                and args.hazard_calculation_id is None):
            sys.exit(MISSING_HAZARD_MSG)
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        run_job(
            expanduser(args.run_risk), args.log_level, log_file,
            args.exports,
            hazard_output_id=args.hazard_output_id,
            hazard_calculation_id=hc_id)
    elif args.delete_risk_calculation is not None:
        del_calc(args.delete_risk_calculation, args.yes)
    # export
    elif args.list_outputs is not None:
        engine.list_outputs(get_hc_id(args.list_outputs))
    elif args.show_view is not None:
        job_id, view_name = args.show_view
        print views.view(view_name, get_hc_id(job_id))
    elif args.list_hazard_outputs is not None:
        deprecate('--list-hazard-outputs', '--list-outputs')
        engine.list_outputs(args.list_hazard_outputs)
    elif args.list_risk_outputs is not None:
        deprecate('--list-risk-outputs', '--list-outputs')
        engine.list_outputs(args.list_risk_outputs)
    elif args.export_output is not None:
        output_id, target_dir = args.export_output
        export(int(output_id), expanduser(target_dir), exports)
    elif args.export_hazard_output is not None:
        deprecate('--export-hazard-output', '--export-output')
        output_id, target_dir = args.export_hazard_output
        export(int(output_id), expanduser(target_dir), exports)
    elif args.export_risk_output is not None:
        deprecate('--export-risk-output', '--export-output')
        output_id, target_dir = args.export_risk_output
        export(int(output_id), expanduser(target_dir), exports)
    elif args.export_outputs is not None:
        job_id, target_dir = args.export_outputs
        export_outputs(get_hc_id(job_id), expanduser(target_dir), exports)
    elif args.export_stats is not None:
        job_id, target_dir, output_type = args.export_stats
        export_stats(get_hc_id(job_id), expanduser(target_dir), output_type,
                     exports)
    # deprecated
    elif args.export_hazard_outputs is not None:
        deprecate('--export-hazard-outputs', '--export-outputs')
        job_id, target_dir = args.export_hazard_outputs
        export_outputs(get_hc_id(job_id), expanduser(target_dir), exports)
    elif args.export_risk_outputs is not None:
        deprecate('--export-risk-outputs', '--export-outputs')
        job_id, target_dir = args.export_risk_outputs
        export_outputs(get_hc_id(job_id), expanduser(target_dir), exports)
    # import
    elif args.load_curve is not None:
        with open(args.load_curve) as f:
            out = import_hazard_curves(f)
        print 'Added output id=%d of type %s; hazard_calculation_id=%d'\
            % (out.id, out.output_type, out.oq_job.id)
    elif args.list_imported_outputs:
        list_imported_outputs()
    elif args.delete_uncompleted_calculations:
        delete_uncompleted_calculations()
    elif args.save_hazard_calculation:
        save_hazards.main(*args.save_hazard_calculation)
    elif args.load_hazard_calculation:
        job_ids = load_hazards.hazard_load(
            models.getcursor('admin').connection, args.load_hazard_calculation)
        print "Load hazard calculation with IDs: %s" % job_ids
    else:
        # no action given: show the usage message
        arg_parser.print_usage()
def main(): arg_parser = set_up_arg_parser() args = arg_parser.parse_args() exports = args.exports or 'xml,csv' if args.version: print __version__ sys.exit(0) if args.run or args.run_hazard or args.run_risk: # the logging will be configured in engine.py pass else: # configure a basic logging logging.basicConfig(level=logging.INFO) if args.config_file: os.environ[config.OQ_CONFIG_FILE_VAR] = \ abspath(expanduser(args.config_file)) config.refresh() if args.no_distribute: os.environ[openquake.engine.NO_DISTRIBUTE_VAR] = '1' if args.make_html_report: conn = models.getcursor('admin').connection print 'Written', make_report(conn, args.make_html_report) sys.exit(0) if args.upgrade_db: logs.set_level('info') conn = models.getcursor('admin').connection msg = upgrade_manager.what_if_I_upgrade(conn, extract_scripts='read_scripts') print msg if msg.startswith('Your database is already updated'): pass elif args.yes or confirm('Proceed? (y/n) '): upgrade_manager.upgrade_db(conn) sys.exit(0) if args.version_db: conn = models.getcursor('admin').connection print upgrade_manager.version_db(conn) sys.exit(0) if args.what_if_I_upgrade: conn = models.getcursor('admin').connection print upgrade_manager.what_if_I_upgrade(conn) sys.exit(0) if args.list_inputs: list_inputs(args.list_inputs) # hazard or hazard+risk elif args.run: job_inis = map(expanduser, args.run.split(',')) if len(job_inis) not in (1, 2): sys.exit('%s should be a .ini filename or a pair of filenames ' 'separated by a comma' % args.run) for job_ini in job_inis: open(job_ini).read() # raise an IOError if the file does not exist log_file = expanduser(args.log_file) \ if args.log_file is not None else None if args.lite: # run hazard and risk together engine.run_job_lite(job_inis, args.log_level, log_file, args.exports) else: # run hazard job = engine.run_job(job_inis[0], args.log_level, log_file, args.exports) # run risk if len(job_inis) == 2: engine.run_job(job_inis[1], args.log_level, log_file, args.exports, 
hazard_calculation_id=job.id) # hazard elif args.list_hazard_calculations: list_calculations('hazard') elif args.run_hazard is not None: log_file = expanduser(args.log_file) \ if args.log_file is not None else None engine.run_job(expanduser(args.run_hazard), args.log_level, log_file, args.exports) elif args.delete_hazard_calculation is not None: del_calc(args.delete_hazard_calculation, args.yes) # risk elif args.list_risk_calculations: list_calculations('risk') elif args.run_risk is not None: if (args.hazard_output_id is None and args.hazard_calculation_id is None): sys.exit(MISSING_HAZARD_MSG) log_file = expanduser(args.log_file) \ if args.log_file is not None else None engine.run_job(expanduser(args.run_risk), args.log_level, log_file, args.exports, hazard_output_id=args.hazard_output_id, hazard_calculation_id=args.hazard_calculation_id) elif args.delete_risk_calculation is not None: del_calc(args.delete_risk_calculation, args.yes) # export elif args.list_outputs is not None: engine.list_outputs(args.list_outputs) elif args.list_hazard_outputs is not None: deprecate('--list-hazard-outputs', '--list-outputs') engine.list_outputs(args.list_hazard_outputs) elif args.list_risk_outputs is not None: deprecate('--list-risk-outputs', '--list-outputs') engine.list_outputs(args.list_risk_outputs) elif args.export_output is not None: output_id, target_dir = args.export_output export(int(output_id), expanduser(target_dir), exports) elif args.export_hazard_output is not None: deprecate('--export-hazard-output', '--export-output') output_id, target_dir = args.export_hazard_output export(int(output_id), expanduser(target_dir), exports) elif args.export_risk_output is not None: deprecate('--export-hazard-output', '--export-output') output_id, target_dir = args.export_risk_output export(int(output_id), expanduser(target_dir), exports) elif args.export_outputs is not None: job_id, target_dir = args.export_outputs export_outputs(int(job_id), expanduser(target_dir), exports) elif 
args.export_stats is not None: job_id, target_dir, output_type = args.export_stats export_stats(int(job_id), expanduser(target_dir), output_type, exports) # deprecated elif args.export_hazard_outputs is not None: deprecate('--export-hazard-outputs', '--export-outputs') job_id, target_dir = args.export_hazard_outputs export_outputs(int(job_id), expanduser(target_dir), exports) elif args.export_risk_outputs is not None: deprecate('--export-risk-outputs', '--export-outputs') job_id, target_dir = args.export_risk_outputs export_outputs(int(job_id), expanduser(target_dir), exports) # import elif args.load_gmf is not None: with open(args.load_gmf) as f: out = import_gmf_scenario(f) print 'Added output id=%d of type %s; hazard_calculation_id=%d'\ % (out.id, out.output_type, out.oq_job.id) elif args.load_curve is not None: with open(args.load_curve) as f: out = import_hazard_curves(f) print 'Added output id=%d of type %s; hazard_calculation_id=%d'\ % (out.id, out.output_type, out.oq_job.id) elif args.list_imported_outputs: list_imported_outputs() elif args.delete_uncompleted_calculations: delete_uncompleted_calculations() elif args.save_hazard_calculation: save_hazards.main(*args.save_hazard_calculation) elif args.load_hazard_calculation: job_ids = load_hazards.hazard_load( models.getcursor('admin').connection, args.load_hazard_calculation) print "Load hazard calculation with IDs: %s" % job_ids else: arg_parser.print_usage()
def main():
    """
    Command line entry point (Python 2 syntax): parse the arguments,
    make sure the DbServer is reachable (starting it if single-user),
    then dispatch exactly one of the mutually exclusive actions.
    """
    arg_parser = set_up_arg_parser()
    args = arg_parser.parse_args()
    exports = args.exports or 'xml,csv'

    if args.version:
        print openquake.engine.__version__
        sys.exit(0)

    if args.run or args.run_hazard or args.run_risk:
        # the logging will be configured in engine.py
        pass
    else:
        # configure a basic logging
        logging.basicConfig(level=logging.INFO)

    if args.config_file:
        os.environ[config.OQ_CONFIG_FILE_VAR] = \
            abspath(expanduser(args.config_file))
        config.refresh()

    if args.no_distribute:
        os.environ['OQ_DISTRIBUTE'] = 'no'

    # check if the DbServer is up
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        err = sock.connect_ex(config.DBS_ADDRESS)
    finally:
        sock.close()
    if err:
        # connection failed: in multi_user mode the server must be started
        # by an administrator, otherwise spawn it here
        multi_user = valid.boolean(config.get('dbserver', 'multi_user'))
        if multi_user:
            sys.exit('Please start the DbServer: '
                     'see the documentation for details')
        # otherwise start the DbServer automatically
        dblog = os.path.expanduser('~/oq-dbserver.log')
        subprocess.Popen([sys.executable, '-m', 'openquake.server.dbserver',
                          '-l', 'INFO'], stderr=open(dblog, 'w'))
    # database maintenance commands exit immediately after running
    if args.upgrade_db:
        logs.set_level('info')
        msg = logs.dbcmd('what_if_I_upgrade', 'read_scripts')
        if msg.startswith('Your database is already updated'):
            pass
        elif args.yes or confirm('Proceed? (y/n) '):
            logs.dbcmd('upgrade_db')
        sys.exit(0)

    if args.version_db:
        print logs.dbcmd('version_db')
        sys.exit(0)

    if args.what_if_I_upgrade:
        print logs.dbcmd('what_if_I_upgrade', 'extract_upgrade_scripts')
        sys.exit(0)

    # check if the db is outdated
    outdated = logs.dbcmd('check_outdated')
    if outdated:
        sys.exit(outdated)

    # hazard or hazard+risk
    if args.hazard_calculation_id:
        hc_id = get_job_id(args.hazard_calculation_id)
    else:
        hc_id = None
    if args.run:
        # `--run` may be a single .ini or "hazard.ini,risk.ini"
        job_inis = map(expanduser, args.run.split(','))
        if len(job_inis) not in (1, 2):
            sys.exit('%s should be a .ini filename or a pair of filenames '
                     'separated by a comma' % args.run)
        for job_ini in job_inis:
            open(job_ini).read()  # raise an IOError if the file does not exist
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        if len(job_inis) == 2:
            # run hazard
            job_id = run_job(job_inis[0], args.log_level,
                             log_file, args.exports)
            # run risk
            run_job(job_inis[1], args.log_level, log_file, args.exports,
                    hazard_calculation_id=job_id)
        else:
            run_job(
                expanduser(args.run), args.log_level, log_file,
                args.exports, hazard_calculation_id=hc_id)
    # hazard
    elif args.list_hazard_calculations:
        for line in logs.dbcmd(
                'list_calculations', 'hazard', getpass.getuser()):
            print line
    elif args.run_hazard is not None:
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        run_job(expanduser(args.run_hazard), args.log_level,
                log_file, args.exports)
    elif args.delete_calculation is not None:
        delete_calculation(args.delete_calculation, args.yes)
    # risk
    elif args.list_risk_calculations:
        for line in logs.dbcmd('list_calculations', 'risk', getpass.getuser()):
            print line
    elif args.run_risk is not None:
        # a risk calculation needs a pre-existing hazard calculation
        if args.hazard_calculation_id is None:
            sys.exit(MISSING_HAZARD_MSG)
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        run_job(
            expanduser(args.run_risk), args.log_level, log_file,
            args.exports, hazard_calculation_id=hc_id)
    # export
    elif args.make_html_report:
        print 'Written', make_report(args.make_html_report)
        sys.exit(0)
    elif args.list_outputs is not None:
        hc_id = get_job_id(args.list_outputs)
        for line in logs.dbcmd('list_outputs', hc_id):
            print line
    elif args.show_view is not None:
        job_id, view_name = args.show_view
        print views.view(view_name, datastore.read(int(job_id)))
    elif args.show_log is not None:
        hc_id = get_job_id(args.show_log[0])
        for line in logs.dbcmd('get_log', hc_id):
            print line
    elif args.export_output is not None:
        output_id, target_dir = args.export_output
        dskey, calc_id, datadir = logs.dbcmd('get_output', int(output_id))
        for line in core.export_output(
                dskey, calc_id, datadir, expanduser(target_dir), exports):
            print line
    elif args.export_outputs is not None:
        job_id, target_dir = args.export_outputs
        hc_id = get_job_id(job_id)
        for line in core.export_outputs(
                hc_id, expanduser(target_dir), exports):
            print line
    elif args.delete_uncompleted_calculations:
        logs.dbcmd('delete_uncompleted_calculations', getpass.getuser())
    else:
        # no action given: show the usage message
        arg_parser.print_usage()
def main():
    """
    Command line entry point (Python 2 syntax): parse the arguments and
    dispatch exactly one of the mutually exclusive actions, delegating
    most database work to `dbcmd`.

    NOTE(review): `conn` is read in the upgrade_db/version_db/
    what_if_I_upgrade/make_html_report branches but never assigned in this
    function — presumably a module-level name; verify, otherwise these
    branches raise NameError.
    """
    arg_parser = set_up_arg_parser()
    args = arg_parser.parse_args()
    exports = args.exports or 'xml,csv'

    if args.version:
        print __version__
        sys.exit(0)

    if args.run or args.run_hazard or args.run_risk:
        # the logging will be configured in engine.py
        pass
    else:
        # configure a basic logging
        logging.basicConfig(level=logging.INFO)

    if args.config_file:
        os.environ[utils.config.OQ_CONFIG_FILE_VAR] = \
            abspath(expanduser(args.config_file))
        utils.config.refresh()

    if args.no_distribute:
        os.environ[openquake.engine.NO_DISTRIBUTE_VAR] = '1'

    # database maintenance commands exit immediately after running
    if args.upgrade_db:
        logs.set_level('info')
        msg = upgrade_manager.what_if_I_upgrade(
            conn, extract_scripts='read_scripts')
        print msg
        if msg.startswith('Your database is already updated'):
            pass
        elif args.yes or utils.confirm('Proceed? (y/n) '):
            upgrade_manager.upgrade_db(conn)
        sys.exit(0)

    if args.version_db:
        print upgrade_manager.version_db(conn)
        sys.exit(0)

    if args.what_if_I_upgrade:
        print upgrade_manager.what_if_I_upgrade(conn)
        sys.exit(0)

    # check if the db is outdated
    outdated = dbcmd('check_outdated')
    if outdated:
        sys.exit(outdated)

    # hazard or hazard+risk
    hc_id = args.hazard_calculation_id
    if hc_id and int(hc_id) < 0:
        # make it possible commands like `oq-engine --run job_risk.ini --hc -1`
        hc_id = dbcmd('get_hc_id', int(hc_id))
    if args.run:
        # `--run` may be a single .ini or "hazard.ini,risk.ini"
        job_inis = map(expanduser, args.run.split(','))
        if len(job_inis) not in (1, 2):
            sys.exit('%s should be a .ini filename or a pair of filenames '
                     'separated by a comma' % args.run)
        for job_ini in job_inis:
            open(job_ini).read()  # raise an IOError if the file does not exist
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        if len(job_inis) == 2:
            # run hazard
            job_id = run_job(job_inis[0], args.log_level,
                             log_file, args.exports)
            # run risk
            run_job(job_inis[1], args.log_level, log_file, args.exports,
                    hazard_calculation_id=job_id)
        else:
            run_job(
                expanduser(args.run), args.log_level, log_file,
                args.exports, hazard_calculation_id=hc_id)
    # hazard
    elif args.list_hazard_calculations:
        dbcmd('list_calculations', 'hazard')
    elif args.run_hazard is not None:
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        run_job(expanduser(args.run_hazard), args.log_level,
                log_file, args.exports)
    elif args.delete_calculation is not None:
        dbcmd('delete_calculation', args.delete_calculation, args.yes)
    # risk
    elif args.list_risk_calculations:
        dbcmd('list_calculations', 'risk')
    elif args.run_risk is not None:
        # a risk calculation needs a pre-existing hazard calculation
        if args.hazard_calculation_id is None:
            sys.exit(MISSING_HAZARD_MSG)
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        run_job(
            expanduser(args.run_risk), args.log_level, log_file,
            args.exports, hazard_calculation_id=hc_id)
    # export
    elif args.make_html_report:
        print 'Written', make_report(conn, args.make_html_report)
        sys.exit(0)
    elif args.list_outputs is not None:
        hc_id = dbcmd('get_hc_id', args.list_outputs)
        dbcmd('list_outputs', hc_id)
    elif args.show_view is not None:
        job_id, view_name = args.show_view
        print views.view(view_name, datastore.read(int(job_id)))
    elif args.show_log is not None:
        hc_id = dbcmd('get_hc_id', args.show_log[0])
        print dbcmd('get_log', hc_id)
    elif args.export_output is not None:
        output_id, target_dir = args.export_output
        dbcmd('export_output', int(output_id), expanduser(target_dir),
              exports)
    elif args.export_outputs is not None:
        job_id, target_dir = args.export_outputs
        hc_id = dbcmd('get_hc_id', job_id)
        dbcmd('export_outputs', hc_id, expanduser(target_dir), exports)
    elif args.delete_uncompleted_calculations:
        dbcmd('delete_uncompleted_calculations')
    else:
        # no action given: show the usage message
        arg_parser.print_usage()
def main():
    """
    Command line entry point (Python 2 syntax): parse the arguments and
    dispatch exactly one of the mutually exclusive actions, delegating
    most database work to `dbcmd`.

    NOTE(review): `conn` is read in the upgrade_db/version_db/
    what_if_I_upgrade/make_html_report branches but never assigned in this
    function — presumably a module-level name; verify, otherwise these
    branches raise NameError.
    """
    arg_parser = set_up_arg_parser()
    args = arg_parser.parse_args()
    exports = args.exports or 'xml,csv'

    if args.version:
        print __version__
        sys.exit(0)

    if args.run or args.run_hazard or args.run_risk:
        # the logging will be configured in engine.py
        pass
    else:
        # configure a basic logging
        logging.basicConfig(level=logging.INFO)

    if args.config_file:
        os.environ[config.OQ_CONFIG_FILE_VAR] = \
            abspath(expanduser(args.config_file))
        config.refresh()

    if args.no_distribute:
        os.environ['OQ_DISTRIBUTE'] = 'no'

    # database maintenance commands exit immediately after running
    if args.upgrade_db:
        logs.set_level('info')
        msg = upgrade_manager.what_if_I_upgrade(
            conn, extract_scripts='read_scripts')
        print msg
        if msg.startswith('Your database is already updated'):
            pass
        elif args.yes or utils.confirm('Proceed? (y/n) '):
            upgrade_manager.upgrade_db(conn)
        sys.exit(0)

    if args.version_db:
        print upgrade_manager.version_db(conn)
        sys.exit(0)

    if args.what_if_I_upgrade:
        print upgrade_manager.what_if_I_upgrade(conn)
        sys.exit(0)

    # check if the db is outdated
    outdated = dbcmd('check_outdated')
    if outdated:
        sys.exit(outdated)

    # hazard or hazard+risk
    hc_id = args.hazard_calculation_id
    if hc_id:
        hc_id = int(hc_id)
        if hc_id < 0:
            # make it possible to run `oq-engine --run job_risk.ini --hc -1`
            hc_id = dbcmd('get_hc_id', hc_id)
    if args.run:
        # `--run` may be a single .ini or "hazard.ini,risk.ini"
        job_inis = map(expanduser, args.run.split(','))
        if len(job_inis) not in (1, 2):
            sys.exit('%s should be a .ini filename or a pair of filenames '
                     'separated by a comma' % args.run)
        for job_ini in job_inis:
            open(job_ini).read()  # raise an IOError if the file does not exist
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        if len(job_inis) == 2:
            # run hazard
            job_id = run_job(job_inis[0], args.log_level,
                             log_file, args.exports)
            # run risk
            run_job(job_inis[1], args.log_level, log_file, args.exports,
                    hazard_calculation_id=job_id)
        else:
            run_job(
                expanduser(args.run), args.log_level, log_file,
                args.exports, hazard_calculation_id=hc_id)
    # hazard
    elif args.list_hazard_calculations:
        dbcmd('list_calculations', 'hazard')
    elif args.run_hazard is not None:
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        run_job(expanduser(args.run_hazard), args.log_level,
                log_file, args.exports)
    elif args.delete_calculation is not None:
        dbcmd('delete_calculation', args.delete_calculation, args.yes)
    # risk
    elif args.list_risk_calculations:
        dbcmd('list_calculations', 'risk')
    elif args.run_risk is not None:
        # a risk calculation needs a pre-existing hazard calculation
        if args.hazard_calculation_id is None:
            sys.exit(MISSING_HAZARD_MSG)
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        run_job(
            expanduser(args.run_risk), args.log_level, log_file,
            args.exports, hazard_calculation_id=hc_id)
    # export
    elif args.make_html_report:
        print 'Written', make_report(conn, args.make_html_report)
        sys.exit(0)
    elif args.list_outputs is not None:
        hc_id = dbcmd('get_hc_id', args.list_outputs)
        dbcmd('list_outputs', hc_id)
    elif args.show_view is not None:
        job_id, view_name = args.show_view
        print views.view(view_name, datastore.read(int(job_id)))
    elif args.show_log is not None:
        hc_id = dbcmd('get_hc_id', args.show_log[0])
        print dbcmd('get_log', hc_id)
    elif args.export_output is not None:
        output_id, target_dir = args.export_output
        dbcmd('export_output', int(output_id), expanduser(target_dir),
              exports)
    elif args.export_outputs is not None:
        job_id, target_dir = args.export_outputs
        hc_id = dbcmd('get_hc_id', job_id)
        dbcmd('export_outputs', hc_id, expanduser(target_dir), exports)
    elif args.delete_uncompleted_calculations:
        dbcmd('delete_uncompleted_calculations')
    else:
        # no action given: show the usage message
        arg_parser.print_usage()