def expose_outputs(dstore):
    """
    Build a correspondence between the outputs in the datastore and the
    ones in the database.

    :param dstore: datastore
    """
    oq = dstore['oqparam']
    # all datastore keys for which an exporter is registered
    exportable = set(ekey[0] for ekey in export.export)
    calcmode = oq.calculation_mode
    dskeys = set(dstore) & exportable  # exportable datastore keys
    if oq.uniform_hazard_spectra:
        dskeys.add('uhs')  # export them
    if oq.hazard_maps:
        dskeys.add('hmaps')  # export them
    if 'rcurves-rlzs' in dstore or 'loss_curves-rlzs' in dstore:
        dskeys.add('loss_maps-rlzs')
    if 'rcurves-stats' in dstore or 'loss_curves-stats' in dstore:
        dskeys.add('loss_maps-stats')
    try:
        rlzs = dstore['realizations']
    except KeyError:
        rlzs = []
    if 'all_loss_ratios' in dskeys:
        dskeys.remove('all_loss_ratios')  # export only specific IDs
    if 'realizations' in dskeys and len(rlzs) <= 1:
        dskeys.remove('realizations')  # do not export a single realization
    if 'ruptures' in dskeys and 'scenario' in calcmode:
        # removing from `exportable` excludes the key via the final
        # intersection below
        exportable.remove('ruptures')  # do not export, as requested by Vitor
    logs.dbcmd('create_outputs', dstore.calc_id, sorted(dskeys & exportable))
def run_job(cfg_file, log_level, log_file, exports='',
            hazard_calculation_id=None):
    """
    Run a job using the specified config file and other options.

    :param str cfg_file:
        Path to calculation config (INI-style) files.
    :param str log_level:
        'debug', 'info', 'warn', 'error', or 'critical'
    :param str log_file:
        Path to log file.
    :param exports:
        A comma-separated string of export types requested by the user.
        Currently only 'xml' is supported.
    :param hazard_calculation_id:
        ID of the previous calculation or None
    """
    # register the job in the database and read its parameters
    job_id, oqparam = dbcmd('job_from_file', cfg_file, getpass.getuser(),
                            hazard_calculation_id)
    calc = engine.run_calc(job_id, oqparam, log_level, log_file, exports,
                           hazard_calculation_id=hazard_calculation_id)
    # capture the duration before flushing resets the monitor
    elapsed = calc.monitor.duration
    calc.monitor.flush()
    dbcmd('print_results', job_id, elapsed)
    return job_id
def expose_outputs(dstore):
    """
    Build a correspondence between the outputs in the datastore and the
    ones in the database.

    :param dstore: datastore
    """
    oq = dstore['oqparam']
    # all datastore keys for which an exporter is registered
    exportable = set(ekey[0] for ekey in export.export)
    calcmode = oq.calculation_mode
    dskeys = set(dstore) & exportable  # exportable datastore keys
    if oq.uniform_hazard_spectra:
        dskeys.add('uhs')  # export them
    if oq.hazard_maps:
        dskeys.add('hmaps')  # export them
    try:
        rlzs = dstore['realizations']
    except KeyError:
        rlzs = []
    if 'realizations' in dskeys and len(rlzs) <= 1:
        dskeys.remove('realizations')  # do not export a single realization
    # small hack: remove the sescollection outputs from scenario
    # calculators, as requested by Vitor.
    # BUGFIX: the original removed the key from `exportable`, but then
    # passed sorted(dskeys) to the db, so 'sescollection' was still
    # exported; removing it from `dskeys` makes the exclusion effective
    if 'sescollection' in dskeys and 'scenario' in calcmode:
        dskeys.remove('sescollection')  # do not export
    logs.dbcmd('create_outputs', dstore.calc_id, sorted(dskeys))
def dbserver(cmd):
    """
    start/stop/restart the database server, or return its status
    """
    # multi-user installations must manage the server externally
    if valid.boolean(config.get('dbserver', 'multi_user')):
        sys.exit('oq dbserver only works in single user mode')
    status = get_status()
    if cmd == 'status':
        print(status)
    elif cmd == 'stop':
        if status != 'running':
            print('already stopped')
        else:
            logs.dbcmd('stop')
            print('stopped')
    elif cmd == 'start':
        if status != 'not-running':
            print('already running')
        else:
            runserver()
            print('started')
    elif cmd == 'restart':
        # stop the server only if it is up, then always start it again
        if status == 'running':
            logs.dbcmd('stop')
            print('stopped')
        runserver()
        print('started')
def calc_remove(request, calc_id):
    """
    Remove the calculation id by setting the field oq_job.relevant to False.
    """
    # the job is only marked irrelevant, not physically deleted
    try:
        logs.dbcmd('set_relevant', calc_id, False)
    except models.NotFound:
        return HttpResponseNotFound()
    # an empty JSON list acknowledges success
    empty_body = json.dumps([])
    return HttpResponse(content=empty_body, content_type=JSON, status=200)
def del_calculation(job_id, confirmed=False):
    """
    Delete a calculation and all associated outputs.
    """
    question = ('Are you sure you want to delete this calculation and all '
                'associated outputs?\nThis action cannot be undone. (y/n): ')
    # bail out unless pre-confirmed or confirmed interactively
    if not (confirmed or confirm(question)):
        return
    try:
        logs.dbcmd('del_calc', job_id, getpass.getuser())
    except RuntimeError as err:
        print(err)
def make_report(isodate='today'):
    """
    Build a HTML report with the computations performed at the given isodate.
    Return the name of the report, which is saved in the current directory.
    """
    # NOTE: uses `unicode` and `cgi.escape`, so this is Python 2 code
    if isodate == 'today':
        isodate = date.today()
    else:
        isodate = date(*time.strptime(isodate, '%Y-%m-%d')[:3])
    isodate1 = isodate + timedelta(1)  # +1 day
    tag_ids = []
    tag_status = []
    tag_contents = []
    # the fetcher returns an header which is stripped with [1:]
    jobs = dbcmd(
        'fetch', ALL_JOBS, isodate.isoformat(), isodate1.isoformat())[1:]
    page = '<h2>%d job(s) finished before midnight of %s</h2>' % (
        len(jobs), isodate)
    for job_id, user, status, ds_calc in jobs:
        tag_ids.append(job_id)
        tag_status.append(status)
        stats = dbcmd('fetch', JOB_STATS, job_id)
        if not stats[1:]:  # no stats row for this job
            continue
        (job_id, user, start_time, stop_time, status) = stats[1]
        try:
            ds = read(job_id, datadir=os.path.dirname(ds_calc))
            txt = view_fullreport('fullreport', ds).decode('utf-8')
            report = html_parts(txt)
        except Exception as exc:
            # render a placeholder instead of failing the whole report
            report = dict(
                html_title='Could not generate report: %s' % cgi.escape(
                    unicode(exc), quote=True),
                fragment='')
        # NOTE(review): this reassignment discards the <h2> summary built
        # before the loop on the first iteration -- confirm this is intended
        page = report['html_title']
        add_duration(stats)
        page += html(stats)
        page += report['fragment']
        tag_contents.append(page)
    page = make_tabs(tag_ids, tag_status, tag_contents) + (
        'Report last updated: %s' % datetime.now())
    fname = 'jobs-%s.html' % isodate
    with open(fname, 'w') as f:
        f.write(PAGE_TEMPLATE % page.encode('utf-8'))
    return fname
def run_job(cfg_file, log_level, log_file, exports='',
            hazard_calculation_id=None):
    """
    Run a job using the specified config file and other options.

    :param str cfg_file:
        Path to calculation config (INI-style) files.
    :param str log_level:
        'debug', 'info', 'warn', 'error', or 'critical'
    :param str log_file:
        Path to log file.
    :param exports:
        A comma-separated string of export types requested by the user.
    :param hazard_calculation_id:
        ID of the previous calculation or None
    """
    # if the master dies, automatically kill the workers
    concurrent_futures_process_monkeypatch()
    ini_path = os.path.abspath(cfg_file)
    job_id, oqparam = eng.job_from_file(
        ini_path, getpass.getuser(), hazard_calculation_id)
    calc = eng.run_calc(job_id, oqparam, log_level, log_file, exports,
                        hazard_calculation_id=hazard_calculation_id)
    calc.monitor.flush()
    # show the produced outputs on standard output
    for line in logs.dbcmd('list_outputs', job_id, False):
        print(line)
    return job_id
def calc(request, id=None):
    """
    Get a list of calculations and report their id, status, job_type,
    is_running, description, and a url where more detailed information
    can be accessed. This is called several times by the Javascript.

    Responses are in JSON.
    """
    base_url = _get_base_url(request)
    user = utils.get_user_data(request)
    rows = logs.dbcmd('get_calcs', request.GET,
                      user['name'], user['acl_on'], id)
    # one dictionary per calculation, with a detail URL attached
    response_data = [
        dict(id=hc_id, owner=owner, status=status, job_type=job_type,
             is_running=is_running, description=desc,
             url=urlparse.urljoin(base_url, 'v1/calc/%d' % hc_id))
        for hc_id, owner, status, job_type, is_running, desc in rows]
    # if id is specified the related dictionary is returned instead the list
    if id is not None:
        [response_data] = response_data
    return HttpResponse(content=json.dumps(response_data), content_type=JSON)
def get_result(request, result_id): """ Download a specific result, by ``result_id``. The common abstracted functionality for getting hazard or risk results. :param request: `django.http.HttpRequest` object. Can contain a `export_type` GET param (the default is 'xml' if no param is specified). :param result_id: The id of the requested artifact. :returns: If the requested ``result_id`` is not available in the format designated by the `export_type`. Otherwise, return a `django.http.HttpResponse` containing the content of the requested artifact. Parameters for the GET request can include an `export_type`, such as 'xml', 'geojson', 'csv', etc. """ # If the result for the requested ID doesn't exist, OR # the job which it is related too is not complete, # throw back a 404. try: job_id, job_status, datadir, ds_key = logs.dbcmd( 'get_result', result_id) if not job_status == 'complete': return HttpResponseNotFound() except models.NotFound: return HttpResponseNotFound() etype = request.GET.get('export_type') export_type = etype or DEFAULT_EXPORT_TYPE tmpdir = tempfile.mkdtemp() try: exported = core.export_from_datastore( (ds_key, export_type), job_id, datadir, tmpdir) except DataStoreExportError as exc: # TODO: there should be a better error page return HttpResponse(content='%s: %s' % (exc.__class__.__name__, exc), content_type='text/plain', status=500) if exported is None: # Throw back a 404 if the exact export parameters are not supported return HttpResponseNotFound( 'export_type=%s is not supported for %s' % (export_type, ds_key)) content_type = EXPORT_CONTENT_TYPE_MAP.get( export_type, DEFAULT_CONTENT_TYPE) try: fname = 'output-%s-%s' % (result_id, os.path.basename(exported)) # 'b' is needed when running the WebUI on Windows data = open(exported, 'rb').read() response = HttpResponse(data, content_type=content_type) response['Content-Length'] = len(data) response['Content-Disposition'] = 'attachment; filename=%s' % fname return response finally: shutil.rmtree(tmpdir)
def calc_results(request, calc_id):
    """
    Get a summarized list of calculation results for a given ``calc_id``.
    Result is a JSON array of objects containing the following attributes:

        * id
        * name
        * type (hazard_curve, hazard_map, etc.)
        * url (the exact url where the full result can be accessed)
    """
    user = utils.get_user_data(request)
    # If the specified calculation doesn't exist OR is not yet complete,
    # throw back a 404.
    try:
        info = logs.dbcmd('calc_info', calc_id)
        # with ACLs enabled, only the owner may see the results
        if user['acl_on'] and info['user_name'] != user['name']:
            return HttpResponseNotFound()
    except models.NotFound:
        return HttpResponseNotFound()
    base_url = _get_base_url(request)
    # NB: export_output has as keys the list (output_type, extension)
    # so this returns an ordered map output_type -> extensions such as
    # OrderedDict([('agg_loss_curve', ['xml', 'csv']), ...])
    output_types = groupby(export, lambda oe: oe[0],
                           lambda oes: [e for o, e in oes])
    results = logs.dbcmd('get_outputs', calc_id)
    if not results:
        return HttpResponseNotFound()
    response_data = []
    for result in results:
        try:  # output from the datastore
            rtype = result.ds_key
            # Catalina asked to remove the .txt outputs (used for the GMFs)
            outtypes = [ot for ot in output_types[rtype] if ot != 'txt']
        except KeyError:
            continue  # non-exportable outputs should not be shown
        url = urlparse.urljoin(base_url, 'v1/calc/result/%d' % result.id)
        datum = dict(
            id=result.id, name=result.display_name, type=rtype,
            outtypes=outtypes, url=url)
        response_data.append(datum)
    return HttpResponse(content=json.dumps(response_data))
def get_log_size(request, calc_id):
    """
    Get the current number of lines in the log
    """
    # a missing calculation yields a 404
    try:
        num_lines = logs.dbcmd('get_log_size', calc_id)
    except models.NotFound:
        return HttpResponseNotFound()
    return HttpResponse(content=json.dumps(num_lines), content_type=JSON)
def run_calc(job_id, oqparam, log_level, log_file, exports,
             hazard_calculation_id=None):
    """
    Run a calculation.

    :param job_id:
        ID of the current job
    :param oqparam:
        :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param str log_level:
        The desired logging level. Valid choices are 'debug', 'info',
        'progress', 'warn', 'error', and 'critical'.
    :param str log_file:
        Complete path (including file name) to file where logs will be
        written. If `None`, logging will just be printed to standard output.
    :param exports:
        A comma-separated string of export types.
    """
    monitor = Monitor('total runtime', measuremem=True)
    with logs.handle(job_id, log_level, log_file):  # run the job
        if USE_CELERY and os.environ.get('OQ_DISTRIBUTE') == 'celery':
            set_concurrent_tasks_default()
        calc = base.calculators(oqparam, monitor, calc_id=job_id)
        tb = 'None\n'  # sentinel meaning "no traceback recorded"
        try:
            logs.dbcmd('set_status', job_id, 'executing')
            _do_run_calc(calc, exports, hazard_calculation_id)
            expose_outputs(calc.datastore)
            records = views.performance_view(calc.datastore)
            logs.dbcmd('save_performance', job_id, records)
            calc.datastore.close()
            logs.LOG.info('Calculation %d finished correctly in %d seconds',
                          job_id, calc.monitor.duration)
            logs.dbcmd('finish', job_id, 'complete')
        except:
            # deliberately broad: any failure must be logged and the job
            # marked as failed in the database before re-raising
            tb = traceback.format_exc()
            try:
                logs.LOG.critical(tb)
                logs.dbcmd('finish', job_id, 'failed')
            except:  # an OperationalError may always happen
                sys.stderr.write(tb)
            raise
        finally:
            # if there was an error in the calculation, this part may fail;
            # in such a situation, we simply log the cleanup error without
            # taking further action, so that the real error can propagate
            try:
                if USE_CELERY:
                    celery_cleanup(TERMINATE, parallel.TaskManager.task_ids)
            except:
                # log the finalization error only if there is no real error
                if tb == 'None\n':
                    logs.LOG.error('finalizing', exc_info=True)
    return calc
def get_traceback(request, calc_id):
    """
    Get the traceback as a list of lines for a given ``calc_id``.
    """
    # If the specified calculation doesn't exist throw back a 404.
    try:
        tb_lines = logs.dbcmd('get_traceback', calc_id)
    except models.NotFound:
        return HttpResponseNotFound()
    body = json.dumps(tb_lines)
    return HttpResponse(content=body, content_type=JSON)
def export_outputs(job_id, target_dir, export_types):
    """
    Yield progress messages while exporting all the outputs of a job.
    """
    # make it possible commands like `oq engine --eos -1 /tmp`
    datadir, dskeys = logs.dbcmd('get_results', job_id)
    if not dskeys:
        yield 'Found nothing to export for job %s' % job_id
    for key in dskeys:
        yield 'Exporting %s...' % key
        for line in export_output(key, job_id, datadir,
                                  target_dir, export_types):
            yield line
def get_log_slice(request, calc_id, start, stop):
    """
    Get a slice of the calculation log as a JSON list of rows
    """
    # normalize empty values coming from the URL
    start = start or 0
    stop = stop or None
    try:
        rows = logs.dbcmd('get_log_slice', calc_id, start, stop)
    except models.NotFound:
        return HttpResponseNotFound()
    return HttpResponse(content=json.dumps(rows), content_type=JSON)
def calc_info(request, calc_id):
    """
    Get a JSON blob containing all of parameters for the given calculation
    (specified by ``calc_id``). Also includes the current job status (
    executing, complete, etc.).
    """
    # unknown calculation -> 404
    try:
        params = logs.dbcmd('calc_info', calc_id)
    except models.NotFound:
        return HttpResponseNotFound()
    body = json.dumps(params)
    return HttpResponse(content=body, content_type=JSON)
def purge_one(calc_id, user):
    """
    Remove one calculation ID from the database and remove its datastore
    """
    # remove the db record first, then the datastore file
    err = dbcmd('del_calc', calc_id, user)
    if err:
        print(err)
    path = os.path.join(datastore.DATADIR, 'calc_%s.hdf5' % calc_id)
    if os.path.exists(path):
        os.remove(path)
        print('Removed %s' % path)
def expose_outputs(dstore):
    """
    Build a correspondence between the outputs in the datastore and the
    ones in the database.

    :param dstore: datastore
    """
    oq = dstore['oqparam']
    # all datastore keys for which an exporter is registered
    exportable = set(ekey[0] for ekey in export.export)
    calcmode = oq.calculation_mode
    dskeys = set(dstore) & exportable  # exportable datastore keys
    if oq.uniform_hazard_spectra:
        dskeys.add('uhs')  # export them
    if 'hmaps' in dskeys and not oq.hazard_maps:
        dskeys.remove('hmaps')  # do not export
    if 'realizations' in dskeys and len(dstore['realizations']) <= 1:
        dskeys.remove('realizations')  # do not export a single realization
    # small hack: remove the sescollection outputs from scenario
    # calculators, as requested by Vitor.
    # BUGFIX: the original removed the key from `exportable`, but then
    # passed sorted(dskeys) to the db, so 'sescollection' was still
    # exported; removing it from `dskeys` makes the exclusion effective
    if 'sescollection' in dskeys and 'scenario' in calcmode:
        dskeys.remove('sescollection')  # do not export
    logs.dbcmd('create_outputs', dstore.calc_id, sorted(dskeys))
def run_calc(job_id, oqparam, log_level, log_file, exports,
             hazard_calculation_id=None):
    """
    Run a calculation.

    :param job_id:
        ID of the current job
    :param oqparam:
        :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param str log_level:
        The desired logging level. Valid choices are 'debug', 'info',
        'progress', 'warn', 'error', and 'critical'.
    :param str log_file:
        Complete path (including file name) to file where logs will be
        written. If `None`, logging will just be printed to standard output.
    :param exports:
        A comma-separated string of export types.
    """
    monitor = Monitor('total runtime', measuremem=True)
    with logs.handle(job_id, log_level, log_file):  # run the job
        if USE_CELERY and os.environ.get('OQ_DISTRIBUTE') == 'celery':
            set_concurrent_tasks_default()
        calc = base.calculators(oqparam, monitor, calc_id=job_id)
        tb = 'None\n'  # sentinel meaning "no traceback recorded"
        try:
            _do_run_calc(calc, exports, hazard_calculation_id)
            # NOTE(review): the job is marked 'complete' *before* the
            # outputs are exposed and the performance records saved; if any
            # of the following calls fail the job status may end up
            # inconsistent -- confirm this ordering is intended
            logs.dbcmd('finish', job_id, 'complete')
            expose_outputs(calc.datastore)
            records = views.performance_view(calc.datastore)
            logs.dbcmd('save_performance', job_id, records)
            calc.datastore.close()
            logs.LOG.info('Calculation %d finished correctly in %d seconds',
                          job_id, calc.monitor.duration)
        except:
            # deliberately broad: any failure must be logged and the job
            # marked as failed in the database before re-raising
            tb = traceback.format_exc()
            try:
                logs.LOG.critical(tb)
                logs.dbcmd('finish', job_id, 'failed')
            except:  # an OperationalError may always happen
                sys.stderr.write(tb)
            raise
        finally:
            # if there was an error in the calculation, this part may fail;
            # in such a situation, we simply log the cleanup error without
            # taking further action, so that the real error can propagate
            try:
                if USE_CELERY:
                    celery_cleanup(TERMINATE, parallel.TaskManager.task_ids)
            except:
                # log the finalization error only if there is no real error
                if tb == 'None\n':
                    logs.LOG.error('finalizing', exc_info=True)
    return calc
def job_from_file(cfg_file, username, hazard_calculation_id=None):
    """
    Create a full job profile from a job config file.

    :param str cfg_file:
        Path to a job.ini file.
    :param str username:
        The user who will own this job profile and all results
    :param hazard_calculation_id:
        ID of a previous calculation or None
    :returns:
        a pair (job_id, oqparam)
    """
    oq = readinput.get_oqparam(cfg_file)
    # register the job in the database and get back its ID
    job_id = logs.dbcmd('create_job', oq.calculation_mode, oq.description,
                        username, datastore.DATADIR, hazard_calculation_id)
    return job_id, oq
def db(cmd, args=''):
    """
    Run a database command
    """
    args = shlex.split(args)
    if cmd not in commands:
        # build the help text listing every known command
        okcmds = '\n'.join(
            '%s %s' % (name, repr(' '.join(params)) if params else '')
            for name, params in sorted(commands.items()))
        print('Invalid command "%s": choose one from\n%s' % (cmd, okcmds))
        return
    if len(args) != len(commands[cmd]):
        print('Wrong number of arguments, expected %s, got %s' % (
            commands[cmd], args))
        return
    dbserver.ensure_on()
    res = logs.dbcmd(cmd, *convert(args))
    # namedtuples are rendered as reStructuredText tables
    if hasattr(res, '_fields'):
        print(rst_table(res))
    else:
        print(res)
def get_datastore(request, job_id):
    """
    Download a full datastore file.

    :param request:
        `django.http.HttpRequest` object.
    :param job_id:
        The id of the requested datastore
    :returns:
        A `django.http.HttpResponse` containing the content
        of the requested artifact, if present, else throws a 404
    """
    try:
        job = logs.dbcmd('get_job', int(job_id), getpass.getuser())
    except dbapi.NotFound:
        return HttpResponseNotFound()
    fname = job.ds_calc_dir + '.hdf5'
    # stream the HDF5 file back as an attachment
    stream = FileWrapper(open(fname, 'rb'))
    response = FileResponse(stream, content_type=HDF5)
    response['Content-Disposition'] = 'attachment; filename=%s' % fname
    return response
def get_result(request, result_id): """ Download a specific result, by ``result_id``. The common abstracted functionality for getting hazard or risk results. :param request: `django.http.HttpRequest` object. Can contain a `export_type` GET param (the default is 'xml' if no param is specified). :param result_id: The id of the requested artifact. :returns: If the requested ``result_id`` is not available in the format designated by the `export_type`. Otherwise, return a `django.http.HttpResponse` containing the content of the requested artifact. Parameters for the GET request can include an `export_type`, such as 'xml', 'geojson', 'csv', etc. """ # If the result for the requested ID doesn't exist, OR # the job which it is related too is not complete, # throw back a 404. try: job_id, job_status, datadir, ds_key = logs.dbcmd( 'get_result', result_id) if not job_status == 'complete': return HttpResponseNotFound() except models.NotFound: return HttpResponseNotFound() etype = request.GET.get('export_type') export_type = etype or DEFAULT_EXPORT_TYPE tmpdir = tempfile.mkdtemp() try: exported = core.export_from_datastore((ds_key, export_type), job_id, datadir, tmpdir) except DataStoreExportError as exc: # TODO: there should be a better error page return HttpResponse(content='%s: %s' % (exc.__class__.__name__, exc), content_type='text/plain', status=500) if exported is None: # Throw back a 404 if the exact export parameters are not supported return HttpResponseNotFound('export_type=%s is not supported for %s' % (export_type, ds_key)) content_type = EXPORT_CONTENT_TYPE_MAP.get(export_type, DEFAULT_CONTENT_TYPE) try: fname = 'output-%s-%s' % (result_id, os.path.basename(exported)) # 'b' is needed when running the WebUI on Windows data = open(exported, 'rb').read() response = HttpResponse(data, content_type=content_type) response['Content-Length'] = len(data) response['Content-Disposition'] = 'attachment; filename=%s' % fname return response finally: shutil.rmtree(tmpdir)
# make sure we use the server on the temporary db config.DBS_ADDRESS = ('localhost', port) def parse_args(argv): # manages the argument "tmpdb=XXX" used in the functional tests args = [] dbname = None for arg in argv: if arg.startswith('tmpdb='): dbname = arg[6:] else: args.append(arg) return args, dbname # the code here is run in development mode; for instance # $ python manage.py runserver 0.0.0.0:8800 if __name__ == "__main__": os.environ.setdefault( "DJANGO_SETTINGS_MODULE", "openquake.server.settings") argv, tmpfile_port = parse_args(sys.argv) if tmpfile_port: # this is used in the functional tests use_tmp_db(tmpfile_port) else: # check the database version logs.dbcmd('check_outdated') # reset is_running logs.dbcmd('reset_is_running') with executor: execute_from_command_line(argv)
def engine(log_file, no_distribute, yes, config_file, make_html_report,
           upgrade_db, version_db, what_if_I_upgrade,
           run_hazard, run_risk, run,
           list_hazard_calculations, list_risk_calculations,
           delete_calculation, delete_uncompleted_calculations,
           hazard_calculation_id, list_outputs, show_log,
           export_output, export_outputs, exports='', log_level='info'):
    """
    Run a calculation using the traditional command line API
    """
    config.abort_if_no_config_available()
    if run or run_hazard or run_risk:
        # the logging will be configured in engine.py
        pass
    else:
        # configure a basic logging
        logging.basicConfig(level=logging.INFO)
    if config_file:
        os.environ[config.OQ_CONFIG_FILE_VAR] = os.path.abspath(
            os.path.expanduser(config_file))
        config.refresh()
    if no_distribute:
        os.environ['OQ_DISTRIBUTE'] = 'no'
    # check if the datadir exists
    if not os.path.exists(datastore.DATADIR):
        os.makedirs(datastore.DATADIR)
    dbserver.ensure_on()
    if upgrade_db:
        logs.set_level('info')
        msg = logs.dbcmd('what_if_I_upgrade', 'read_scripts')
        if msg.startswith('Your database is already updated'):
            pass
        elif yes or confirm('Proceed? (y/n) '):
            logs.dbcmd('upgrade_db')
        sys.exit(0)
    if version_db:
        print(logs.dbcmd('version_db'))
        sys.exit(0)
    if what_if_I_upgrade:
        print(logs.dbcmd('what_if_I_upgrade', 'extract_upgrade_scripts'))
        sys.exit(0)
    # check if the db is outdated
    outdated = logs.dbcmd('check_outdated')
    if outdated:
        sys.exit(outdated)
    # hazard or hazard+risk
    if hazard_calculation_id:
        hc_id = get_job_id(hazard_calculation_id)
    else:
        hc_id = None
    if run:
        job_inis = [os.path.expanduser(ini) for ini in run.split(',')]
        if len(job_inis) not in (1, 2):
            sys.exit('%s should be a .ini filename or a pair of filenames '
                     'separated by a comma' % run)
        for job_ini in job_inis:
            open(job_ini).read()  # raise an IOError if the file does not exist
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        if len(job_inis) == 2:
            # run hazard
            job_id = run_job(job_inis[0], log_level, log_file, exports)
            # run risk
            run_job(job_inis[1], log_level, log_file, exports,
                    hazard_calculation_id=job_id)
        else:
            run_job(
                os.path.expanduser(run), log_level, log_file, exports,
                hazard_calculation_id=hc_id)
    # hazard
    elif list_hazard_calculations:
        for line in logs.dbcmd(
                'list_calculations', 'hazard', getpass.getuser()):
            print(line)
    elif run_hazard is not None:
        print('WARN: --rh/--run-hazard are deprecated, use --run instead',
              file=sys.stderr)
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        run_job(os.path.expanduser(run_hazard), log_level, log_file, exports)
    elif delete_calculation is not None:
        del_calculation(delete_calculation, yes)
    # risk
    elif list_risk_calculations:
        for line in logs.dbcmd('list_calculations', 'risk', getpass.getuser()):
            print(line)
    elif run_risk is not None:
        print('WARN: --rr/--run-risk are deprecated, use --run instead',
              file=sys.stderr)
        if hazard_calculation_id is None:
            sys.exit(MISSING_HAZARD_MSG)
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        run_job(
            os.path.expanduser(run_risk), log_level, log_file, exports,
            hazard_calculation_id=hc_id)
    # export
    elif make_html_report:
        print('Written %s' % make_report(make_html_report))
        sys.exit(0)
    elif list_outputs is not None:
        hc_id = get_job_id(list_outputs)
        for line in logs.dbcmd('list_outputs', hc_id):
            print(line)
    elif show_log is not None:
        hc_id = get_job_id(show_log)
        for line in logs.dbcmd('get_log', hc_id):
            print(line)
    elif export_output is not None:
        output_id, target_dir = export_output
        dskey, calc_id, datadir = logs.dbcmd('get_output', int(output_id))
        for line in core.export_output(
                dskey, calc_id, datadir, os.path.expanduser(target_dir),
                exports or 'xml,csv'):
            print(line)
    elif export_outputs is not None:
        job_id, target_dir = export_outputs
        hc_id = get_job_id(job_id)
        for line in core.export_outputs(
                hc_id, os.path.expanduser(target_dir), exports or 'xml,csv'):
            print(line)
    elif delete_uncompleted_calculations:
        logs.dbcmd('delete_uncompleted_calculations', getpass.getuser())
    else:
        engine.parentparser.prog = 'oq engine'
        engine.parentparser.print_usage()
# point the client at the server on the temporary db
config.DBS_ADDRESS = ('localhost', port)


def parse_args(argv):
    # manages the argument "tmpdb=XXX" used in the functional tests;
    # returns the argv without that option plus the extracted db name
    args = []
    dbname = None
    for arg in argv:
        if arg.startswith('tmpdb='):
            dbname = arg[6:]
        else:
            args.append(arg)
    return args, dbname


# the code here is run in development mode; for instance
# $ python manage.py runserver 0.0.0.0:8800
if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE",
                          "openquake.server.settings")
    argv, tmpfile_port = parse_args(sys.argv)
    if tmpfile_port:
        # this is used in the functional tests
        use_tmp_db(tmpfile_port)
    else:
        # check the database version
        logs.dbcmd('check_outdated')
        # reset is_running
        logs.dbcmd('reset_is_running')
    with executor:
        execute_from_command_line(argv)
def main():
    """
    Parse the command line arguments and dispatch the requested engine
    operation (legacy Python 2 command line interface).
    """
    arg_parser = set_up_arg_parser()
    args = arg_parser.parse_args()
    exports = args.exports or 'xml,csv'
    if args.version:
        print openquake.engine.__version__
        sys.exit(0)
    if args.run or args.run_hazard or args.run_risk:
        # the logging will be configured in engine.py
        pass
    else:
        # configure a basic logging
        logging.basicConfig(level=logging.INFO)
    if args.config_file:
        os.environ[config.OQ_CONFIG_FILE_VAR] = \
            abspath(expanduser(args.config_file))
        config.refresh()
    if args.no_distribute:
        os.environ['OQ_DISTRIBUTE'] = 'no'
    # check if the DbServer is up
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        err = sock.connect_ex(config.DBS_ADDRESS)
    finally:
        sock.close()
    if err:
        multi_user = valid.boolean(config.get('dbserver', 'multi_user'))
        if multi_user:
            sys.exit('Please start the DbServer: '
                     'see the documentation for details')
        # otherwise start the DbServer automatically
        dblog = os.path.expanduser('~/oq-dbserver.log')
        subprocess.Popen([sys.executable, '-m', 'openquake.server.dbserver',
                          '-l', 'INFO'], stderr=open(dblog, 'w'))
    if args.upgrade_db:
        logs.set_level('info')
        msg = logs.dbcmd('what_if_I_upgrade', 'read_scripts')
        if msg.startswith('Your database is already updated'):
            pass
        elif args.yes or confirm('Proceed? (y/n) '):
            logs.dbcmd('upgrade_db')
        sys.exit(0)
    if args.version_db:
        print logs.dbcmd('version_db')
        sys.exit(0)
    if args.what_if_I_upgrade:
        print logs.dbcmd('what_if_I_upgrade', 'extract_upgrade_scripts')
        sys.exit(0)
    # check if the db is outdated
    outdated = logs.dbcmd('check_outdated')
    if outdated:
        sys.exit(outdated)
    # hazard or hazard+risk
    if args.hazard_calculation_id:
        hc_id = get_job_id(args.hazard_calculation_id)
    else:
        hc_id = None
    if args.run:
        job_inis = map(expanduser, args.run.split(','))
        if len(job_inis) not in (1, 2):
            sys.exit('%s should be a .ini filename or a pair of filenames '
                     'separated by a comma' % args.run)
        for job_ini in job_inis:
            open(job_ini).read()  # raise an IOError if the file does not exist
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        if len(job_inis) == 2:
            # run hazard
            job_id = run_job(job_inis[0], args.log_level, log_file,
                             args.exports)
            # run risk
            run_job(job_inis[1], args.log_level, log_file, args.exports,
                    hazard_calculation_id=job_id)
        else:
            run_job(
                expanduser(args.run), args.log_level, log_file,
                args.exports, hazard_calculation_id=hc_id)
    # hazard
    elif args.list_hazard_calculations:
        for line in logs.dbcmd(
                'list_calculations', 'hazard', getpass.getuser()):
            print line
    elif args.run_hazard is not None:
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        run_job(expanduser(args.run_hazard), args.log_level,
                log_file, args.exports)
    elif args.delete_calculation is not None:
        delete_calculation(args.delete_calculation, args.yes)
    # risk
    elif args.list_risk_calculations:
        for line in logs.dbcmd('list_calculations', 'risk',
                               getpass.getuser()):
            print line
    elif args.run_risk is not None:
        if args.hazard_calculation_id is None:
            sys.exit(MISSING_HAZARD_MSG)
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        run_job(
            expanduser(args.run_risk), args.log_level, log_file,
            args.exports, hazard_calculation_id=hc_id)
    # export
    elif args.make_html_report:
        print 'Written', make_report(args.make_html_report)
        sys.exit(0)
    elif args.list_outputs is not None:
        hc_id = get_job_id(args.list_outputs)
        for line in logs.dbcmd('list_outputs', hc_id):
            print line
    elif args.show_view is not None:
        job_id, view_name = args.show_view
        print views.view(view_name, datastore.read(int(job_id)))
    elif args.show_log is not None:
        hc_id = get_job_id(args.show_log[0])
        for line in logs.dbcmd('get_log', hc_id):
            print line
    elif args.export_output is not None:
        output_id, target_dir = args.export_output
        dskey, calc_id, datadir = logs.dbcmd('get_output', int(output_id))
        for line in core.export_output(
                dskey, calc_id, datadir, expanduser(target_dir), exports):
            print line
    elif args.export_outputs is not None:
        job_id, target_dir = args.export_outputs
        hc_id = get_job_id(job_id)
        for line in core.export_outputs(
                hc_id, expanduser(target_dir), exports):
            print line
    elif args.delete_uncompleted_calculations:
        logs.dbcmd('delete_uncompleted_calculations', getpass.getuser())
    else:
        arg_parser.print_usage()
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # # Copyright (C) 2015-2016 GEM Foundation # # OpenQuake is free software: you can redistribute it and/or modify it # under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # OpenQuake is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with OpenQuake. If not, see <http://www.gnu.org/licenses/>. import os from django.core.wsgi import get_wsgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "openquake.server.settings") from django.db import connection from openquake.engine import logs logs.dbcmd('reset_is_running') # This application object is used by the development server # as well as any WSGI server configured to use this file. application = get_wsgi_application()
def get_job_id(job_id, username=None):
    """
    Resolve a command-line job identifier into a database job ID.

    :param job_id: job identifier as given on the command line
    :param username: user name owning the job; defaults to the current user
    :returns: the job ID found in the database
    """
    username = username or getpass.getuser()
    # Keep the identifier the user asked for in a separate name: the
    # original code rebound `job_id` to the lookup result, so on failure
    # the error message printed the falsy result (e.g. "Job None of ...")
    # instead of the requested ID.
    found = logs.dbcmd('get_job_id', job_id, username)
    if not found:
        sys.exit('Job %s of %s not found' % (job_id, username))
    return found
def read(calc_id):
    """
    Open the datastore associated with the given calculation.

    :param calc_id: calculation ID
    :returns: the datastore of the corresponding job
    """
    current_user = getpass.getuser()
    job = logs.dbcmd('get_job', calc_id, current_user)
    # the datastore directory is the parent of the job's ds_calc_dir
    return datastore.read(job.id, datadir=os.path.dirname(job.ds_calc_dir))
def main():
    """
    Command-line entry point: parse the arguments and dispatch to the
    requested engine operation (run, list, export, delete, db upgrade).
    """
    arg_parser = set_up_arg_parser()
    args = arg_parser.parse_args()
    # NOTE(review): the run_job calls below pass args.exports, not this
    # defaulted value; only the export commands use `exports` — confirm
    # whether the default should apply to runs too.
    exports = args.exports or 'xml,csv'
    if args.version:
        print __version__
        sys.exit(0)
    if args.run or args.run_hazard or args.run_risk:
        # the logging will be configured in engine.py
        pass
    else:
        # configure a basic logging
        logging.basicConfig(level=logging.INFO)
    if args.config_file:
        os.environ[config.OQ_CONFIG_FILE_VAR] = \
            abspath(expanduser(args.config_file))
        config.refresh()
    if args.no_distribute:
        # disable the task distribution for this process
        os.environ['OQ_DISTRIBUTE'] = 'no'
    if args.upgrade_db:
        logs.set_level('info')
        msg = upgrade_manager.what_if_I_upgrade(
            conn, extract_scripts='read_scripts')
        print msg
        if msg.startswith('Your database is already updated'):
            pass
        elif args.yes or utils.confirm('Proceed? (y/n) '):
            upgrade_manager.upgrade_db(conn)
        sys.exit(0)
    if args.version_db:
        print upgrade_manager.version_db(conn)
        sys.exit(0)
    if args.what_if_I_upgrade:
        print upgrade_manager.what_if_I_upgrade(conn)
        sys.exit(0)
    # check if the db is outdated
    outdated = dbcmd('check_outdated')
    if outdated:
        sys.exit(outdated)
    # hazard or hazard+risk
    hc_id = args.hazard_calculation_id
    if hc_id:
        hc_id = int(hc_id)
        if hc_id < 0:
            # make it possible to run `oq-engine --run job_risk.ini --hc -1`
            # (negative values count back from the latest calculation)
            hc_id = dbcmd('get_hc_id', hc_id)
    if args.run:
        job_inis = map(expanduser, args.run.split(','))
        if len(job_inis) not in (1, 2):
            sys.exit('%s should be a .ini filename or a pair of filenames '
                     'separated by a comma' % args.run)
        for job_ini in job_inis:
            open(job_ini).read()  # raise an IOError if the file does not exist
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        if len(job_inis) == 2:
            # run hazard
            job_id = run_job(job_inis[0], args.log_level,
                             log_file, args.exports)
            # run risk
            run_job(job_inis[1], args.log_level, log_file, args.exports,
                    hazard_calculation_id=job_id)
        else:
            run_job(
                expanduser(args.run), args.log_level, log_file,
                args.exports, hazard_calculation_id=hc_id)
    # hazard
    elif args.list_hazard_calculations:
        dbcmd('list_calculations', 'hazard')
    elif args.run_hazard is not None:
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        run_job(expanduser(args.run_hazard), args.log_level,
                log_file, args.exports)
    elif args.delete_calculation is not None:
        dbcmd('delete_calculation', args.delete_calculation, args.yes)
    # risk
    elif args.list_risk_calculations:
        dbcmd('list_calculations', 'risk')
    elif args.run_risk is not None:
        # a risk calculation always needs a previous hazard calculation
        if args.hazard_calculation_id is None:
            sys.exit(MISSING_HAZARD_MSG)
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        run_job(
            expanduser(args.run_risk), args.log_level, log_file,
            args.exports, hazard_calculation_id=hc_id)
    # export
    elif args.make_html_report:
        print 'Written', make_report(conn, args.make_html_report)
        sys.exit(0)
    elif args.list_outputs is not None:
        hc_id = dbcmd('get_hc_id', args.list_outputs)
        dbcmd('list_outputs', hc_id)
    elif args.show_view is not None:
        job_id, view_name = args.show_view
        print views.view(view_name, datastore.read(int(job_id)))
    elif args.show_log is not None:
        hc_id = dbcmd('get_hc_id', args.show_log[0])
        print dbcmd('get_log', hc_id)
    elif args.export_output is not None:
        output_id, target_dir = args.export_output
        dbcmd('export_output', int(output_id), expanduser(target_dir),
              exports)
    elif args.export_outputs is not None:
        job_id, target_dir = args.export_outputs
        hc_id = dbcmd('get_hc_id', job_id)
        dbcmd('export_outputs', hc_id, expanduser(target_dir), exports)
    elif args.delete_uncompleted_calculations:
        dbcmd('delete_uncompleted_calculations')
    else:
        arg_parser.print_usage()
def main():
    """
    Command-line entry point: parse the arguments and dispatch to the
    requested engine operation (run, list, export, delete, db upgrade).
    """
    arg_parser = set_up_arg_parser()
    args = arg_parser.parse_args()
    # NOTE(review): the run_job calls below pass args.exports, not this
    # defaulted value; only the export commands use `exports` — confirm
    # whether the default should apply to runs too.
    exports = args.exports or 'xml,csv'
    if args.version:
        print __version__
        sys.exit(0)
    if args.run or args.run_hazard or args.run_risk:
        # the logging will be configured in engine.py
        pass
    else:
        # configure a basic logging
        logging.basicConfig(level=logging.INFO)
    if args.config_file:
        os.environ[utils.config.OQ_CONFIG_FILE_VAR] = \
            abspath(expanduser(args.config_file))
        utils.config.refresh()
    if args.no_distribute:
        # disable the task distribution for this process
        os.environ[openquake.engine.NO_DISTRIBUTE_VAR] = '1'
    if args.upgrade_db:
        logs.set_level('info')
        msg = upgrade_manager.what_if_I_upgrade(
            conn, extract_scripts='read_scripts')
        print msg
        if msg.startswith('Your database is already updated'):
            pass
        elif args.yes or utils.confirm('Proceed? (y/n) '):
            upgrade_manager.upgrade_db(conn)
        sys.exit(0)
    if args.version_db:
        print upgrade_manager.version_db(conn)
        sys.exit(0)
    if args.what_if_I_upgrade:
        print upgrade_manager.what_if_I_upgrade(conn)
        sys.exit(0)
    # check if the db is outdated
    outdated = dbcmd('check_outdated')
    if outdated:
        sys.exit(outdated)
    # hazard or hazard+risk
    hc_id = args.hazard_calculation_id
    if hc_id and int(hc_id) < 0:
        # make it possible commands like `oq-engine --run job_risk.ini --hc -1`
        # (negative values count back from the latest calculation)
        hc_id = dbcmd('get_hc_id', int(hc_id))
    if args.run:
        job_inis = map(expanduser, args.run.split(','))
        if len(job_inis) not in (1, 2):
            sys.exit('%s should be a .ini filename or a pair of filenames '
                     'separated by a comma' % args.run)
        for job_ini in job_inis:
            open(job_ini).read()  # raise an IOError if the file does not exist
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        if len(job_inis) == 2:
            # run hazard
            job_id = run_job(job_inis[0], args.log_level,
                             log_file, args.exports)
            # run risk
            run_job(job_inis[1], args.log_level, log_file, args.exports,
                    hazard_calculation_id=job_id)
        else:
            run_job(
                expanduser(args.run), args.log_level, log_file,
                args.exports, hazard_calculation_id=hc_id)
    # hazard
    elif args.list_hazard_calculations:
        dbcmd('list_calculations', 'hazard')
    elif args.run_hazard is not None:
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        run_job(expanduser(args.run_hazard), args.log_level,
                log_file, args.exports)
    elif args.delete_calculation is not None:
        dbcmd('delete_calculation', args.delete_calculation, args.yes)
    # risk
    elif args.list_risk_calculations:
        dbcmd('list_calculations', 'risk')
    elif args.run_risk is not None:
        # a risk calculation always needs a previous hazard calculation
        if args.hazard_calculation_id is None:
            sys.exit(MISSING_HAZARD_MSG)
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        run_job(
            expanduser(args.run_risk), args.log_level, log_file,
            args.exports, hazard_calculation_id=hc_id)
    # export
    elif args.make_html_report:
        print 'Written', make_report(conn, args.make_html_report)
        sys.exit(0)
    elif args.list_outputs is not None:
        hc_id = dbcmd('get_hc_id', args.list_outputs)
        dbcmd('list_outputs', hc_id)
    elif args.show_view is not None:
        job_id, view_name = args.show_view
        print views.view(view_name, datastore.read(int(job_id)))
    elif args.show_log is not None:
        hc_id = dbcmd('get_hc_id', args.show_log[0])
        print dbcmd('get_log', hc_id)
    elif args.export_output is not None:
        output_id, target_dir = args.export_output
        dbcmd('export_output', int(output_id), expanduser(target_dir),
              exports)
    elif args.export_outputs is not None:
        job_id, target_dir = args.export_outputs
        hc_id = dbcmd('get_hc_id', job_id)
        dbcmd('export_outputs', hc_id, expanduser(target_dir), exports)
    elif args.delete_uncompleted_calculations:
        dbcmd('delete_uncompleted_calculations')
    else:
        arg_parser.print_usage()
from openquake.server.dbapi import Db
from openquake.engine import logs

# Open the engine database directly (no DbServer), with autocommit
# (isolation_level=None) and declared-type detection.
db = Db(sqlite3.connect, DATABASE['NAME'], isolation_level=None,
        detect_types=sqlite3.PARSE_DECLTYPES, timeout=20)
# NB: I am increasing the timeout from 5 to 20 seconds to see if the random
# OperationalError: "database is locked" disappear in the WebUI tests


# bypass the DbServer and run the action directly
def dbcmd(action, *args):
    """
    A dispatcher to the database server.

    :param action: database action to perform
    :param args: arguments
    """
    # look up the action by name in the actions module and run it
    # against the local db connection
    return getattr(actions, action)(db, *args)


# the code here is run in development mode; for instance
# $ python manage.py runserver 0.0.0.0:8800
if __name__ == "__main__":
    os.environ.setdefault(
        "DJANGO_SETTINGS_MODULE", "openquake.server.settings")
    # with --nothreading replace the DbServer dispatcher with the
    # local bypass above, so actions run in-process
    if '--nothreading' in sys.argv:
        logs.dbcmd = dbcmd  # turn this on when debugging
    logs.dbcmd('upgrade_db')  # make sure the DB exists
    with executor:
        execute_from_command_line(sys.argv)
# -*- coding: utf-8 -*- # vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright (C) 2016 GEM Foundation # OpenQuake is free software: you can redistribute it and/or modify it # under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # OpenQuake is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU Affero General Public License # along with OpenQuake. If not, see <http://www.gnu.org/licenses/>. from openquake.engine.logs import dbcmd if __name__ == '__main__': dbcmd('stop')