def engine(log_file, no_distribute, yes, config_file, make_html_report,
           upgrade_db, version_db, what_if_I_upgrade,
           run_hazard, run_risk, run,
           list_hazard_calculations, list_risk_calculations,
           delete_calculation, delete_uncompleted_calculations,
           hazard_calculation_id, list_outputs, show_log, export_output,
           export_outputs, exports='', log_level='info'):
    """
    Run a calculation using the traditional command line API.

    This is a command dispatcher: the caller (the `oq engine` argument
    parser, presumably — confirm against the CLI wiring) sets at most one
    of the command parameters (``run``, ``upgrade_db``, ``list_*``,
    ``delete_*``, ``export_*``, ...) and the matching branch below is
    executed.  Note the control flow:

    * the database maintenance commands (``upgrade_db``, ``version_db``,
      ``what_if_I_upgrade``) and ``make_html_report`` terminate the
      process with ``sys.exit(0)``;
    * an outdated database or a bad ``run`` argument terminates the
      process with a nonzero/err-message ``sys.exit``;
    * if no command is given, the parser usage string is printed.

    :param log_file: path of the log file or None (``~`` is expanded)
    :param no_distribute: if set, force OQ_DISTRIBUTE=no (serial run)
    :param yes: skip interactive confirmations (db upgrade, deletion)
    :param config_file: alternative openquake.cfg path, exported via
        the OQ_CONFIG_FILE_VAR environment variable
    :param run: .ini filename, or "haz.ini,risk.ini" pair, to compute
    :param hazard_calculation_id: ID of a previous hazard calculation,
        used as input for a risk calculation
    :param exports: comma-separated export formats (default '' here;
        the export commands fall back to 'xml,csv' when empty)
    :param log_level: logging level name (default 'info')
    """
    config.abort_if_no_config_available()
    if run or run_hazard or run_risk:
        # the logging will be configured in engine.py
        pass
    else:
        # configure a basic logging
        logging.basicConfig(level=logging.INFO)

    if config_file:
        # make the custom config visible to subprocesses too, then
        # re-read the configuration from the new location
        os.environ[config.OQ_CONFIG_FILE_VAR] = os.path.abspath(
            os.path.expanduser(config_file))
        config.refresh()

    if no_distribute:
        os.environ['OQ_DISTRIBUTE'] = 'no'

    # check if the datadir exists
    if not os.path.exists(datastore.DATADIR):
        os.makedirs(datastore.DATADIR)

    # the database server must be up for every dbcmd below
    dbserver.ensure_on()

    if upgrade_db:
        logs.set_level('info')
        msg = logs.dbcmd('what_if_I_upgrade', 'read_scripts')
        if msg.startswith('Your database is already updated'):
            pass  # nothing to do, fall through to the exit
        elif yes or confirm('Proceed? (y/n) '):
            logs.dbcmd('upgrade_db')
        sys.exit(0)

    if version_db:
        print(logs.dbcmd('version_db'))
        sys.exit(0)

    if what_if_I_upgrade:
        print(logs.dbcmd('what_if_I_upgrade', 'extract_upgrade_scripts'))
        sys.exit(0)

    # check if the db is outdated
    outdated = logs.dbcmd('check_outdated')
    if outdated:
        sys.exit(outdated)

    # hazard or hazard+risk
    if hazard_calculation_id:
        hc_id = get_job_id(hazard_calculation_id)
    else:
        hc_id = None
    if run:
        job_inis = [os.path.expanduser(ini) for ini in run.split(',')]
        if len(job_inis) not in (1, 2):
            sys.exit('%s should be a .ini filename or a pair of filenames '
                     'separated by a comma' % run)
        for job_ini in job_inis:
            open(job_ini).read()  # raise an IOError if the file does not exist
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        if len(job_inis) == 2:
            # run hazard
            job_id = run_job(job_inis[0], log_level, log_file, exports)
            # run risk, chained on the hazard job just computed
            run_job(job_inis[1], log_level, log_file, exports,
                    hazard_calculation_id=job_id)
        else:
            run_job(
                os.path.expanduser(run), log_level, log_file,
                exports, hazard_calculation_id=hc_id)
    # hazard
    elif list_hazard_calculations:
        for line in logs.dbcmd(
                'list_calculations', 'hazard', getpass.getuser()):
            print(line)
    elif run_hazard is not None:
        # deprecated entry point, kept for backward compatibility
        print('WARN: --rh/--run-hazard are deprecated, use --run instead',
              file=sys.stderr)
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        run_job(os.path.expanduser(run_hazard), log_level,
                log_file, exports)
    elif delete_calculation is not None:
        del_calculation(delete_calculation, yes)
    # risk
    elif list_risk_calculations:
        for line in logs.dbcmd('list_calculations', 'risk',
                               getpass.getuser()):
            print(line)
    elif run_risk is not None:
        # deprecated entry point, kept for backward compatibility;
        # a risk run requires a pre-existing hazard calculation
        print('WARN: --rr/--run-risk are deprecated, use --run instead',
              file=sys.stderr)
        if hazard_calculation_id is None:
            sys.exit(MISSING_HAZARD_MSG)
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        run_job(
            os.path.expanduser(run_risk), log_level, log_file,
            exports, hazard_calculation_id=hc_id)
    # export
    elif make_html_report:
        print('Written %s' % make_report(make_html_report))
        sys.exit(0)
    elif list_outputs is not None:
        hc_id = get_job_id(list_outputs)
        for line in logs.dbcmd('list_outputs', hc_id):
            print(line)
    elif show_log is not None:
        hc_id = get_job_id(show_log)
        for line in logs.dbcmd('get_log', hc_id):
            print(line)
    elif export_output is not None:
        # export a single output, identified by (output_id, target_dir)
        output_id, target_dir = export_output
        dskey, calc_id, datadir = logs.dbcmd('get_output', int(output_id))
        for line in core.export_output(
                dskey, calc_id, datadir, os.path.expanduser(target_dir),
                exports or 'xml,csv'):
            print(line)
    elif export_outputs is not None:
        # export all outputs of a job, identified by (job_id, target_dir)
        job_id, target_dir = export_outputs
        hc_id = get_job_id(job_id)
        for line in core.export_outputs(
                hc_id, os.path.expanduser(target_dir),
                exports or 'xml,csv'):
            print(line)
    elif delete_uncompleted_calculations:
        logs.dbcmd('delete_uncompleted_calculations', getpass.getuser())
    else:
        # no command given: show the usage of the `oq engine` subcommand
        engine.parentparser.prog = 'oq engine'
        engine.parentparser.print_usage()
# Celery configuration script for the OpenQuake engine workers.
# NOTE(review): the module-level names defined here (BROKER_URL, the
# commented-out BROKER_POOL_LIMIT) match Celery setting names, so this
# module is presumably loaded by Celery at worker startup — confirm
# against the celery invocation.

import os
import sys

# force the celery distribution mode in the worker processes;
# must happen before any openquake import reads OQ_DISTRIBUTE
os.environ["OQ_DISTRIBUTE"] = "celery"
# point Django at the OpenQuake server settings
os.environ["DJANGO_SETTINGS_MODULE"] = "openquake.server.settings"

# just in case you are using oq-engine from sources
# with the rest of the oq libraries installed into the system (or a
# virtual environment) you must set this environment variable
if os.environ.get("OQ_ENGINE_USE_SRCDIR"):
    sys.modules['openquake'].__dict__["__path__"].insert(
        0, os.path.join(os.path.dirname(__file__), "openquake"))

from openquake.engine import config

# fail fast if no openquake.cfg can be found
config.abort_if_no_config_available()

# make modules living next to this file importable by the workers
sys.path.insert(0, os.path.dirname(__file__))

# [amqp] section of openquake.cfg: user, password, host, port, vhost
amqp = config.get_section("amqp")

# RabbitMQ broker (default)
BROKER_URL = 'amqp://%(user)s:%(password)s@%(host)s:%(port)s/%(vhost)s' % \
    amqp

# Redis broker (works only on Trusty)
# BROKER_URL = 'redis://%(host)s:6379/0' % amqp

# BROKER_POOL_LIMIT enables a connections pool so Celery can reuse
# a single connection to RabbitMQ. Value 10 is the default from
# Celery 2.5 where this feature is enabled by default.
# Actually disabled because it's not stable in production.
# Celery configuration script for the OpenQuake engine workers
# (variant without the Django settings / sys.path setup).
# NOTE(review): the module-level names defined here (BROKER_URL,
# BROKER_POOL_LIMIT) match Celery setting names, so this module is
# presumably loaded by Celery at worker startup — confirm against the
# celery invocation.

import os
import sys

# force the celery distribution mode in the worker processes;
# must happen before any openquake import reads OQ_DISTRIBUTE
os.environ["OQ_DISTRIBUTE"] = "celery"

# just in case you are using oq-engine from sources
# with the rest of the oq libraries installed into the system (or a
# virtual environment) you must set this environment variable
if os.environ.get("OQ_ENGINE_USE_SRCDIR"):
    sys.modules['openquake'].__dict__["__path__"].insert(
        0, os.path.join(os.path.dirname(__file__), "openquake"))

from openquake.engine import config

# fail fast if no openquake.cfg can be found
config.abort_if_no_config_available()

# [amqp] section of openquake.cfg: user, password, host, port, vhost
amqp = config.get_section("amqp")

# RabbitMQ broker (default)
BROKER_URL = 'amqp://%(user)s:%(password)s@%(host)s:%(port)s/%(vhost)s' % \
    amqp

# Redis broker (works only on Trusty)
# BROKER_URL = 'redis://%(host)s:6379/0' % amqp

# BROKER_POOL_LIMIT enables a connections pool so Celery can reuse
# a single connection to RabbitMQ. Value 10 is the default from
# Celery 2.5 where this feature is enabled by default.
# Actually disabled because it's not stable in production.
# See https://bugs.launchpad.net/oq-engine/+bug/1250402
BROKER_POOL_LIMIT = None