Example #1
def run(job_ini,
        slowest=False,
        hc=None,
        param='',
        concurrent_tasks=None,
        exports='',
        loglevel='info',
        calc_id='nojob',
        pdb=None,
        reuse_input=None):
    """
    Run a calculation bypassing the database layer
    """
    dbserver.ensure_on()
    if param:
        params = dict(p.split('=', 1) for p in param.split(','))
    else:
        params = {}
    if hc:
        params['hazard_calculation_id'] = str(hc)
    if slowest:
        prof = cProfile.Profile()
        # keep this statement in sync with the _run call below;
        # hc has already been folded into params['hazard_calculation_id']
        stmt = ('_run(job_ini, concurrent_tasks, calc_id, pdb, reuse_input, '
                'loglevel, exports, params)')
        prof.runctx(stmt, globals(), locals())
        pstat = calc_path + '.pstat'
        prof.dump_stats(pstat)
        print('Saved profiling info in %s' % pstat)
        print(get_pstats(pstat, slowest))
        return
    try:
        return _run(job_ini, concurrent_tasks, calc_id, pdb, reuse_input,
                    loglevel, exports, params)
    finally:
        parallel.Starmap.shutdown()
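
A minimal driver for this variant, as a sketch: the import path (openquake.commands.run) and the job.ini file are assumptions, and slowest is taken to be the number of slow functions to print, as in the later variants where it is declared as an int.

# Hypothetical usage; import path and job file are assumptions.
from openquake.commands.run import run

run('job.ini', exports='csv', loglevel='warning')  # plain run with CSV exports
run('job.ini', slowest=8)  # profile the run and print the 8 slowest functions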
Example #2
def importcalc(host, calc_id, username, password):
    """
    Import a remote calculation into the local database
    """
    if '/' in host.split('//', 1)[1]:
        sys.exit('Wrong host ending with /%s' % host.rsplit('/', 1)[1])
    calc_url = '/'.join([host, 'v1/calc', str(calc_id)])
    dbserver.ensure_on()
    job = logs.dbcmd('get_job', calc_id)
    if job is not None:
        sys.exit('There is already a job #%d in the local db' % calc_id)

    datadir = datastore.get_datadir()
    session = login(host, username, password)
    status = session.get('%s/status' % calc_url)
    if 'Log in to an existing account' in status.text:
        sys.exit('Could not login')
    json = status.json()
    if json["parent_id"]:
        sys.exit('The job has a parent (#%(parent_id)d) and cannot be '
                 'downloaded' % json)
    resp = session.get('%s/datastore' % calc_url, stream=True)
    assert resp.status_code == 200, resp.status_code
    fname = '%s/calc_%d.hdf5' % (datadir, calc_id)
    down = 0
    with open(fname, 'wb') as f:
        logging.info('%s -> %s', calc_url, fname)
        for chunk in resp.iter_content(CHUNKSIZE):
            f.write(chunk)
            down += len(chunk)
            general.println('Downloaded {:,} bytes'.format(down))
    print()
    with datastore.read(calc_id) as dstore:
        engine.expose_outputs(dstore, json['owner'], json['status'])
    logging.info('Imported calculation %d successfully', calc_id)
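
An invocation sketch for this variant; the server URL, credentials and calculation id are placeholders. Note that the host must not contain a path component after the scheme, or the check at the top exits.

# Placeholders only: point at a real engine server with valid credentials.
importcalc('https://engine.example.org', 1234, 'alice', 's3cr3t')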
Example #3
def importcalc(calc_id):
    """
    Import a remote calculation into the local database. server, username
    and password must be specified in an openquake.cfg file.
    NB: calc_id can be a local pathname to a datastore not already
    present in the database: in that case it is imported in the db.
    """
    dbserver.ensure_on()
    try:
        calc_id = int(calc_id)
    except ValueError:  # assume calc_id is a pathname
        remote = False
    else:
        remote = True
        job = logs.dbcmd('get_job', calc_id)
        if job is not None:
            sys.exit('There is already a job #%d in the local db' % calc_id)
    if remote:
        datadir = datastore.get_datadir()
        webex = WebExtractor(calc_id)
        hc_id = webex.oqparam.hazard_calculation_id
        if hc_id:
            sys.exit('The job has a parent (#%d) and cannot be '
                     'downloaded' % hc_id)
        webex.dump('%s/calc_%d.hdf5' % (datadir, calc_id))
        webex.close()
    with datastore.read(calc_id) as dstore:
        engine.expose_outputs(dstore, status='complete')
    logging.info('Imported calculation %s successfully', calc_id)
Example #4
def run(job_ini,
        slowest=False,
        hc=None,
        param='',
        concurrent_tasks=None,
        exports='',
        loglevel='info',
        pdb=None):
    """
    Run a calculation bypassing the database layer
    """
    dbserver.ensure_on()
    if param:
        params = oqvalidation.OqParam.check(
            dict(p.split('=', 1) for p in param.split(',')))
    else:
        params = {}
    if slowest:
        prof = cProfile.Profile()
        stmt = ('_run(job_ini, concurrent_tasks, pdb, loglevel, hc, '
                'exports, params)')
        prof.runctx(stmt, globals(), locals())
        pstat = calc_path + '.pstat'
        prof.dump_stats(pstat)
        print('Saved profiling info in %s' % pstat)
        print(get_pstats(pstat, slowest))
    else:
        return _run(job_ini, concurrent_tasks, pdb, loglevel, hc, exports,
                    params)
Example #5
def importcalc(calc_id):
    """
    Import a remote calculation into the local database. server, username
    and password must be specified in an openquake.cfg file.
    NB: calc_id can be a local pathname to a datastore not already
    present in the database: in that case it is imported in the db.
    """
    dbserver.ensure_on()
    try:
        calc_id = int(calc_id)
    except ValueError:  # assume calc_id is a pathname
        calc_id, datadir = datastore.extract_calc_id_datadir(calc_id)
        status = 'complete'
        remote = False
    else:
        remote = True
    job = logs.dbcmd('get_job', calc_id)
    if job is not None:
        sys.exit('There is already a job #%d in the local db' % calc_id)
    if remote:
        datadir = datastore.get_datadir()
        webex = WebExtractor(calc_id)
        status = webex.status['status']
        hc_id = webex.oqparam.hazard_calculation_id
        if hc_id:
            sys.exit('The job has a parent (#%d) and cannot be '
                     'downloaded' % hc_id)
        webex.dump('%s/calc_%d.hdf5' % (datadir, calc_id))
        webex.close()
    with datastore.read(calc_id) as dstore:
        engine.expose_outputs(dstore, status=status)
    logging.info('Imported calculation %d successfully', calc_id)
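
Because calc_id can be either an integer id or a local datastore pathname, both call forms below should work with this variant (the id and path are made up; the path must match the calc_<id>.hdf5 naming that extract_calc_id_datadir expects):

importcalc(1234)  # download calc 1234 from the server in openquake.cfg
importcalc('/home/user/oqdata/calc_1234.hdf5')  # import a local datastore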
Example #6
def main(yes=False):
    """
    Remove all the datastores and the database of the current user
    """
    ok = yes or confirm('Do you really want to destroy all your data? (y/n) ')
    if not ok:
        return

    dbpath = os.path.realpath(os.path.expanduser(config.dbserver.file))
    if not os.path.isfile(dbpath):
        sys.exit('%s does not exist' % dbpath)
    else:
        dbserver.ensure_on()  # start the dbserver in a subprocess
        user = getpass.getuser()
        for calc_id in logs.dbcmd('get_calc_ids', user):
            purge_one(calc_id, user, force=True)
        if os.access(dbpath, os.W_OK):   # single user mode
            purge_all(user)  # calculations in oqdata not in the db
            # stop the dbserver first
            pid = logs.dbcmd('getpid')
            os.kill(pid, signal.SIGTERM)
            time.sleep(.5)  # give time to stop
            assert dbserver.get_status() == 'not-running'
            print('dbserver stopped')
            # remove the database
            os.remove(dbpath)
            print('Removed %s' % dbpath)
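
Since this command is destructive, a non-interactive call must opt in explicitly; a sketch, assuming single-user mode (only then is the database file writable and removed):

main(yes=True)  # purge all calculations of the current user without prompting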
Example #7
def webui(cmd, hostport="127.0.0.1:8800"):
    """
    Start the webui server in the foreground or perform other operations
    on the Django application
    """
    dbserver.ensure_on()  # start the dbserver in a subprocess
    if cmd == "start":
        rundjango("runserver", hostport)
    elif cmd == "syncdb":
        rundjango("syncdb")
Example #8
def post_risk(ebr_id):
    """
    Generate loss curves and maps from an event loss table
    """
    dbserver.ensure_on()
    dstore = util.read(ebr_id)
    oq = dstore['oqparam']
    prc = PostRiskCalculator(oq)
    prc.datastore.parent = dstore
    prc.run()
    logging.info('Generated %s', prc.datastore.filename)
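
For instance, to rebuild loss outputs from an existing event-based risk calculation (the id 42 is made up; the referenced datastore must contain an event loss table):

post_risk(42)  # reads calc_42.hdf5 and writes a new child datastore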
Example #9
def webui(cmd, hostport='127.0.0.1:8800', skip_browser=False):
    """
    Start the webui server in the foreground or perform other operations
    on the Django application
    """
    dbpath = os.path.realpath(os.path.expanduser(config.dbserver.file))
    if os.path.isfile(dbpath) and not os.access(dbpath, os.W_OK):
        sys.exit('This command must be run by the proper user: '
                 'see the documentation for details')
    if cmd == 'start':
        dbserver.ensure_on()  # start the dbserver in a subprocess
        rundjango('runserver', hostport, skip_browser)
    elif cmd in commands:
        rundjango(cmd)
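
Typical invocations for this variant, sketched (the alternate bind address is an assumption):

webui('start')  # serve on the default 127.0.0.1:8800
webui('start', '0.0.0.0:8800', skip_browser=True)  # custom bind, no browser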
Example #10
 def test_oqdata(self):
     # check that the environment variable OQ_DATADIR is honored
     job_ini = os.path.join(os.path.dirname(case_2.__file__), 'job_2.ini')
     tempdir = tempfile.mkdtemp()
     dbserver.ensure_on()
     with mock.patch.dict(os.environ, OQ_DATADIR=tempdir):
         [(job_id, oq)] = run_jobs([job_ini], log_level='error')
         job = commonlib.logs.dbcmd('get_job', job_id)
         self.assertTrue(job.ds_calc_dir.startswith(tempdir),
                         job.ds_calc_dir)
     with Print.patch() as p:
         export('ruptures', job_id, 'csv', tempdir)
     self.assertIn('Exported', str(p))
     shutil.rmtree(tempdir)
Example #11
 def test_oqdata(self):
     # check that the environment variable OQ_DATADIR is honored
     job_ini = os.path.join(os.path.dirname(case_2.__file__), 'job_2.ini')
     tempdir = tempfile.mkdtemp()
     dbserver.ensure_on()
     with mock.patch.dict(os.environ, OQ_DATADIR=tempdir):
         job_id = run_job(job_ini, log_level='error')
         job = commonlib.logs.dbcmd('get_job', job_id)
         self.assertTrue(job.ds_calc_dir.startswith(tempdir),
                         job.ds_calc_dir)
     with Print.patch() as p:
         export('ruptures', job_id, 'csv', tempdir)
     self.assertIn('Exported', str(p))
     shutil.rmtree(tempdir)
Example #12
 def test_oqdata(self):
     # check that the environment variable OQ_DATADIR is honored
     job_ini = os.path.join(os.path.dirname(case_2.__file__), 'job_2.ini')
     tempdir = tempfile.mkdtemp()
     dbserver.ensure_on()
     with mock.patch.dict(os.environ, OQ_DATADIR=tempdir):
         [job] = run_jobs(create_jobs([job_ini], 'error'))
         job = commonlib.logs.dbcmd('get_job', job.calc_id)
         self.assertTrue(job.ds_calc_dir.startswith(tempdir),
                         job.ds_calc_dir)
     with Print.patch() as p:
         sap.runline(f'openquake.commands export ruptures {job.id} '
                     f'-e csv --export-dir={tempdir}')
     self.assertIn('Exported', str(p))
     shutil.rmtree(tempdir)
Example #13
def main(cmd, args=()):
    """
    Run a database command
    """
    if cmd in commands and len(args) != len(commands[cmd]):
        sys.exit('Wrong number of arguments, expected %s, got %s' %
                 (commands[cmd], args))
    elif (cmd not in commands and not cmd.upper().startswith('SELECT')
          and config.dbserver.multi_user and getpass.getuser() != 'openquake'):
        sys.exit('You have no permission to run %s' % cmd)
    dbserver.ensure_on()
    res = logs.dbcmd(cmd, *convert(args))
    if hasattr(res, '_fields') and res.__class__.__name__ != 'Row':
        print(rst_table(res))
    else:
        print(res)
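
A direct-call sketch; 'get_job' is assumed to be one of the registered commands (it is used via logs.dbcmd in the examples above) and the argument tuple must match its declared arity:

main('get_job', ('1',))  # args arrive as strings and go through convert()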
Example #14
def webui(cmd, hostport='127.0.0.1:8800'):
    """
    Start the webui server in the foreground or perform other operations
    on the Django application
    """

    db_path = os.path.expanduser(config.get('dbserver', 'file'))
    if os.path.isfile(db_path) and not os.access(db_path, os.W_OK):
        sys.exit('This command must be run by the proper user: '
                 'see the documentation for details')
    if cmd == 'start':
        dbserver.ensure_on()  # start the dbserver in a subprocess
        rundjango('runserver', hostport)
    elif cmd == 'migrate':
        rundjango('migrate')
    # For backward compatibility with Django 1.6
    elif cmd == 'syncdb':
        rundjango('syncdb')
Example #15
def db(cmd, args=()):
    """
    Run a database command
    """
    if cmd not in commands:
        okcmds = '\n'.join(
            '%s %s' % (name, repr(' '.join(args)) if args else '')
            for name, args in sorted(commands.items()))
        print('Invalid command "%s": choose one from\n%s' % (cmd, okcmds))
    elif len(args) != len(commands[cmd]):
        print('Wrong number of arguments, expected %s, got %s' % (
            commands[cmd], args))
    else:
        dbserver.ensure_on()
        res = logs.dbcmd(cmd, *convert(args))
        if hasattr(res, '_fields') and res.__class__.__name__ != 'Row':
            print(rst_table(res))
        else:
            print(res)
Example #16
def run(job_ini, slowest=False, hc=None, param='', concurrent_tasks=None,
        exports='', loglevel='info', pdb=None):
    """
    Run a calculation bypassing the database layer
    """
    dbserver.ensure_on()
    if param:
        params = oqvalidation.OqParam.check(
            dict(p.split('=', 1) for p in param.split(',')))
    else:
        params = {}
    if slowest:
        prof = cProfile.Profile()
        stmt = ('_run(job_ini, concurrent_tasks, pdb, loglevel, hc, '
                'exports, params)')
        prof.runctx(stmt, globals(), locals())
        pstat = calc_path + '.pstat'
        prof.dump_stats(pstat)
        print('Saved profiling info in %s' % pstat)
        print(get_pstats(pstat, slowest))
    else:
        _run(job_ini, concurrent_tasks, pdb, loglevel, hc, exports, params)
Example #17
def main(cfg_file):
    dbserver.ensure_on()
    startTime = datetime.now()
    cfg = configparser.ConfigParser()
    cfg.read(cfg_file)
    (gmf_file, gmf_file_gmpe_rate, sites, gsim_list, cinfo, oq_param,
     mean_shift_inter_residuals, realizations_inter, realizations_intra,
     intra_files_name, intra_files, csv_rate_gmf_file,
     seed) = read_config_file(cfg)
    gmfs_median = read_input_gmf(gmf_file, gmf_file_gmpe_rate)
    imts = [PGA(), SA(0.3)]
    vs30 = 180
    (std_total, std_inter,
     std_intra) = calculate_total_std(gsim_list, imts, vs30)

    inter_residual, gmpe_imt = calc_inter_residuals(mean_shift_inter_residuals,
                                                    realizations_inter,
                                                    std_inter)

    sp_correlation = cfg['input']['sp_correlation']
    intra_residual, num_intra_matrices = calc_intra_residuals(
        sp_correlation, realizations_intra, intra_files_name, intra_files,
        sites, gmpe_imt, std_intra)
    N = len(sites)
    num_gmfs = (len(gmfs_median) * len(inter_residual['rates_inter']) *
                len(intra_residual['rates_intra']))
    f, calc_id = create_parent_hdf5(N, num_gmfs, sites, cinfo, oq_param)

    zip_intra = create_zip_intra(gsim_list, imts, intra_residual)

    save_hdf5_rate(num_gmfs, csv_rate_gmf_file, gmfs_median, gsim_list,
                   inter_residual, intra_residual, seed, num_intra_matrices,
                   realizations_intra, N, imts, zip_intra, f)

    create_indices(N, f, num_gmfs)

    f.close()
    print('Saved', calc_id)
    print(datetime.now() - startTime)
Example #18
def main(job_ini,
         pdb=False,
         reuse_input=False,
         *,
         slowest: int = None,
         hc: int = None,
         param='',
         concurrent_tasks: int = None,
         exports: valid.export_formats = '',
         loglevel='info'):
    """
    Run a calculation
    """
    dbserver.ensure_on()
    if param:
        params = dict(p.split('=', 1) for p in param.split(','))
    else:
        params = {}
    if hc:
        params['hazard_calculation_id'] = str(hc)
    if slowest:
        prof = cProfile.Profile()
        prof.runctx(
            '_run(job_ini[0], 0, pdb, reuse_input, loglevel, '
            'exports, params)', globals(), locals())
        pstat = calc_path + '.pstat'
        prof.dump_stats(pstat)
        print('Saved profiling info in %s' % pstat)
        print(get_pstats(pstat, slowest))
        return
    if len(job_ini) == 1:
        return _run(job_ini[0], concurrent_tasks, pdb, reuse_input, loglevel,
                    exports, params)
    jobs = create_jobs(job_ini, loglevel, hc_id=hc)
    for job in jobs:
        job.params.update(params)
        job.params['exports'] = ','.join(exports)
    run_jobs(jobs)
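
In this variant job_ini is a sequence, so one or more files may be passed; a sketch with made-up file names:

main(['job.ini'], slowest=8)  # profile a single run
main(['job_hazard.ini', 'job_risk.ini'])  # multiple files go through create_jobs/run_jobs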
Example #19
def main(no_distribute=False,
         yes=False,
         upgrade_db=False,
         db_version=False,
         what_if_I_upgrade=False,
         list_hazard_calculations=False,
         list_risk_calculations=False,
         delete_uncompleted_calculations=False,
         multi=False,
         reuse_input=False,
         *,
         log_file=None,
         make_html_report=None,
         run=None,
         delete_calculation: int = None,
         hazard_calculation_id: int = None,
         list_outputs: int = None,
         show_log=None,
         export_output=None,
         export_outputs=None,
         param='',
         config_file=None,
         exports='',
         log_level='info'):
    """
    Run a calculation using the traditional command line API
    """
    if not run:
        # configure basic logging
        logs.init()

    if config_file:
        config.read(os.path.abspath(os.path.expanduser(config_file)),
                    soft_mem_limit=int,
                    hard_mem_limit=int,
                    port=int,
                    multi_user=valid.boolean,
                    serialize_jobs=valid.boolean,
                    strict=valid.boolean,
                    code=exec)

    if no_distribute:
        os.environ['OQ_DISTRIBUTE'] = 'no'

    # check if the datadir exists
    datadir = datastore.get_datadir()
    if not os.path.exists(datadir):
        os.makedirs(datadir)

    dbserver.ensure_on()
    # check if we are talking to the right server
    err = dbserver.check_foreign()
    if err:
        sys.exit(err)

    if upgrade_db:
        msg = logs.dbcmd('what_if_I_upgrade', 'read_scripts')
        if msg.startswith('Your database is already updated'):
            pass
        elif yes or confirm('Proceed? (y/n) '):
            logs.dbcmd('upgrade_db')
        sys.exit(0)

    if db_version:
        safeprint(logs.dbcmd('db_version'))
        sys.exit(0)

    if what_if_I_upgrade:
        safeprint(logs.dbcmd('what_if_I_upgrade', 'extract_upgrade_scripts'))
        sys.exit(0)

    # check if the db is outdated
    outdated = logs.dbcmd('check_outdated')
    if outdated:
        sys.exit(outdated)

    # hazard or hazard+risk
    if hazard_calculation_id == -1:
        # get the latest calculation of the current user
        hc_id = get_job_id(hazard_calculation_id, getpass.getuser())
    elif hazard_calculation_id:
        # make it possible to use calculations made by another user
        hc_id = get_job_id(hazard_calculation_id)
    else:
        hc_id = None
    if run:
        pars = dict(p.split('=', 1) for p in param.split(',')) if param else {}
        if reuse_input:
            pars['cachedir'] = datadir
        if hc_id:
            pars['hazard_calculation_id'] = str(hc_id)
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        job_inis = [os.path.expanduser(f) for f in run]
        pars['multi'] = multi
        run_jobs(job_inis, log_level, log_file, exports, **pars)

    # hazard
    elif list_hazard_calculations:
        for line in logs.dbcmd('list_calculations', 'hazard',
                               getpass.getuser()):
            safeprint(line)
    elif delete_calculation is not None:
        del_calculation(delete_calculation, yes)
    # risk
    elif list_risk_calculations:
        for line in logs.dbcmd('list_calculations', 'risk', getpass.getuser()):
            safeprint(line)

    # export
    elif make_html_report:
        safeprint('Written %s' % make_report(make_html_report))
        sys.exit(0)

    elif list_outputs is not None:
        hc_id = get_job_id(list_outputs)
        for line in logs.dbcmd('list_outputs', hc_id):
            safeprint(line)
    elif show_log is not None:
        hc_id = get_job_id(show_log)
        for line in logs.dbcmd('get_log', hc_id):
            safeprint(line)

    elif export_output is not None:
        output_id, target_dir = export_output
        dskey, calc_id, datadir = logs.dbcmd('get_output', int(output_id))
        for line in core.export_output(dskey, calc_id, datadir,
                                       os.path.expanduser(target_dir), exports
                                       or DEFAULT_EXPORTS):
            safeprint(line)

    elif export_outputs is not None:
        job_id, target_dir = export_outputs
        hc_id = get_job_id(job_id)
        for line in core.export_outputs(hc_id, os.path.expanduser(target_dir),
                                        exports or DEFAULT_EXPORTS):
            safeprint(line)

    elif delete_uncompleted_calculations:
        logs.dbcmd('delete_uncompleted_calculations', getpass.getuser())
    else:
        print("Please pass some option, see oq engine --help")
Example #20
    headers = {'User-Agent': 'OpenQuake Engine %s;%s;%s;%s' %
               (__version__, calculation_mode, platform.platform(),
                config.distribution.oq_distribute)}
    try:
        req = Request(OQ_API + '/engine/latest', headers=headers)
        # NB: a timeout < 1 does not work
        data = urlopen(req, timeout=1).read()  # bytes
        tag_name = json.loads(decode(data))['tag_name']
        current = version_triple(__version__)
        latest = version_triple(tag_name)
    except Exception:  # page not available or wrong version tag
        msg = ('An error occurred while calling %s/engine/latest to check'
               ' if the installed version of the engine is up to date.' %
               OQ_API)
        logging.warning(msg, exc_info=True)
        return
    if current < latest:
        return ('Version %s of the engine is available, but you are '
                'still using version %s' % (tag_name, __version__))
    else:
        return ''


if __name__ == '__main__':
    from openquake.server import dbserver
    # run a job object stored in a pickle file, called by job.yaml
    with open(sys.argv[1], 'rb') as f:
        job = pickle.load(f)
    dbserver.ensure_on()
    run_jobs([job])
Example #21
def engine(log_file, no_distribute, yes, config_file, make_html_report,
           upgrade_db, version_db, what_if_I_upgrade,
           run_hazard, run_risk, run,
           list_hazard_calculations, list_risk_calculations,
           delete_calculation, delete_uncompleted_calculations,
           hazard_calculation_id, list_outputs, show_log,
           export_output, export_outputs, exports='',
           log_level='info'):
    """
    Run a calculation using the traditional command line API
    """
    config.abort_if_no_config_available()

    if run or run_hazard or run_risk:
        # the logging will be configured in engine.py
        pass
    else:
        # configure basic logging
        logging.basicConfig(level=logging.INFO)

    if config_file:
        os.environ[config.OQ_CONFIG_FILE_VAR] = os.path.abspath(
            os.path.expanduser(config_file))
        config.refresh()

    if no_distribute:
        os.environ['OQ_DISTRIBUTE'] = 'no'

    # check if the datadir exists
    if not os.path.exists(datastore.DATADIR):
        os.makedirs(datastore.DATADIR)

    dbserver.ensure_on()
    
    if upgrade_db:
        logs.set_level('info')
        msg = logs.dbcmd('what_if_I_upgrade', 'read_scripts')
        if msg.startswith('Your database is already updated'):
            pass
        elif yes or confirm('Proceed? (y/n) '):
            logs.dbcmd('upgrade_db')
        sys.exit(0)

    if version_db:
        print(logs.dbcmd('version_db'))
        sys.exit(0)

    if what_if_I_upgrade:
        print(logs.dbcmd('what_if_I_upgrade', 'extract_upgrade_scripts'))
        sys.exit(0)

    # check if the db is outdated
    outdated = logs.dbcmd('check_outdated')
    if outdated:
        sys.exit(outdated)

    # hazard or hazard+risk
    if hazard_calculation_id:
        hc_id = get_job_id(hazard_calculation_id)
    else:
        hc_id = None
    if run:
        job_inis = [os.path.expanduser(ini) for ini in run.split(',')]
        if len(job_inis) not in (1, 2):
            sys.exit('%s should be a .ini filename or a pair of filenames '
                     'separated by a comma' % run)
        for job_ini in job_inis:
            open(job_ini).read()  # raise an IOError if the file does not exist
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None

        if len(job_inis) == 2:
            # run hazard
            job_id = run_job(job_inis[0], log_level,
                             log_file, exports)
            # run risk
            run_job(job_inis[1], log_level, log_file,
                    exports, hazard_calculation_id=job_id)
        else:
            run_job(
                os.path.expanduser(run), log_level, log_file,
                exports, hazard_calculation_id=hc_id)
    # hazard
    elif list_hazard_calculations:
        for line in logs.dbcmd(
                'list_calculations', 'hazard', getpass.getuser()):
            print(line)
    elif run_hazard is not None:
        print('WARN: --rh/--run-hazard are deprecated, use --run instead',
              file=sys.stderr)
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        run_job(os.path.expanduser(run_hazard), log_level,
                log_file, exports)
    elif delete_calculation is not None:
        del_calculation(delete_calculation, yes)
    # risk
    elif list_risk_calculations:
        for line in logs.dbcmd('list_calculations', 'risk', getpass.getuser()):
            print(line)
    elif run_risk is not None:
        print('WARN: --rr/--run-risk are deprecated, use --run instead',
              file=sys.stderr)
        if hazard_calculation_id is None:
            sys.exit(MISSING_HAZARD_MSG)
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        run_job(
            os.path.expanduser(run_risk),
            log_level, log_file, exports,
            hazard_calculation_id=hc_id)

    # export
    elif make_html_report:
        print('Written %s' % make_report(make_html_report))
        sys.exit(0)

    elif list_outputs is not None:
        hc_id = get_job_id(list_outputs)
        for line in logs.dbcmd('list_outputs', hc_id):
            print(line)
    elif show_log is not None:
        hc_id = get_job_id(show_log)
        for line in logs.dbcmd('get_log', hc_id):
            print(line)

    elif export_output is not None:
        output_id, target_dir = export_output
        dskey, calc_id, datadir = logs.dbcmd('get_output', int(output_id))
        for line in core.export_output(
                dskey, calc_id, datadir, os.path.expanduser(target_dir),
                exports or 'xml,csv'):
            print(line)

    elif export_outputs is not None:
        job_id, target_dir = export_outputs
        hc_id = get_job_id(job_id)
        for line in core.export_outputs(
                hc_id, os.path.expanduser(target_dir), exports or 'xml,csv'):
            print(line)

    elif delete_uncompleted_calculations:
        logs.dbcmd('delete_uncompleted_calculations', getpass.getuser())

    else:
        engine.parentparser.prog = 'oq engine'
        engine.parentparser.print_usage()
Example #22
def engine(log_file, no_distribute, yes, config_file, make_html_report,
           upgrade_db, db_version, what_if_I_upgrade,
           run_hazard, run_risk, run,
           list_hazard_calculations, list_risk_calculations,
           delete_calculation, delete_uncompleted_calculations,
           hazard_calculation_id, list_outputs, show_log,
           export_output, export_outputs, exports='',
           log_level='info'):
    """
    Run a calculation using the traditional command line API
    """
    if run or run_hazard or run_risk:
        # the logging will be configured in engine.py
        pass
    else:
        # configure basic logging
        logging.basicConfig(level=logging.INFO)

    if config_file:
        config.load(os.path.abspath(os.path.expanduser(config_file)))

    if no_distribute:
        os.environ['OQ_DISTRIBUTE'] = 'no'

    # check if the datadir exists
    datadir = datastore.get_datadir()
    if not os.path.exists(datadir):
        os.makedirs(datadir)

    dbserver.ensure_on()
    # check if we are talking to the right server
    err = dbserver.check_foreign()
    if err:
        sys.exit(err)

    if upgrade_db:
        logs.set_level('info')
        msg = logs.dbcmd('what_if_I_upgrade', 'read_scripts')
        if msg.startswith('Your database is already updated'):
            pass
        elif yes or confirm('Proceed? (y/n) '):
            logs.dbcmd('upgrade_db')
        sys.exit(0)

    if db_version:
        safeprint(logs.dbcmd('db_version'))
        sys.exit(0)

    if what_if_I_upgrade:
        safeprint(logs.dbcmd('what_if_I_upgrade', 'extract_upgrade_scripts'))
        sys.exit(0)

    # check if the db is outdated
    outdated = logs.dbcmd('check_outdated')
    if outdated:
        sys.exit(outdated)

    # hazard or hazard+risk
    if hazard_calculation_id:
        hc_id = get_job_id(hazard_calculation_id)
    else:
        hc_id = None
    if run:
        job_ini = os.path.expanduser(run)
        open(job_ini, 'rb').read()  # IOError if the file does not exist
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        run_job(os.path.expanduser(run), log_level, log_file,
                exports, hazard_calculation_id=hc_id)
    # hazard
    elif list_hazard_calculations:
        for line in logs.dbcmd(
                'list_calculations', 'hazard', getpass.getuser()):
            safeprint(line)
    elif run_hazard is not None:
        safeprint('WARN: --rh/--run-hazard are deprecated, use --run instead',
                  file=sys.stderr)
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        run_job(os.path.expanduser(run_hazard), log_level,
                log_file, exports)
    elif delete_calculation is not None:
        del_calculation(delete_calculation, yes)
    # risk
    elif list_risk_calculations:
        for line in logs.dbcmd('list_calculations', 'risk', getpass.getuser()):
            safeprint(line)
    elif run_risk is not None:
        safeprint('WARN: --rr/--run-risk are deprecated, use --run instead',
                  file=sys.stderr)
        if hazard_calculation_id is None:
            sys.exit(MISSING_HAZARD_MSG)
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        run_job(
            os.path.expanduser(run_risk),
            log_level, log_file, exports,
            hazard_calculation_id=hc_id)

    # export
    elif make_html_report:
        safeprint('Written %s' % make_report(make_html_report))
        sys.exit(0)

    elif list_outputs is not None:
        hc_id = get_job_id(list_outputs)
        for line in logs.dbcmd('list_outputs', hc_id):
            safeprint(line)
    elif show_log is not None:
        hc_id = get_job_id(show_log)
        for line in logs.dbcmd('get_log', hc_id):
            safeprint(line)

    elif export_output is not None:
        output_id, target_dir = export_output
        dskey, calc_id, datadir = logs.dbcmd('get_output', int(output_id))
        for line in core.export_output(
                dskey, calc_id, datadir, os.path.expanduser(target_dir),
                exports or 'csv,xml'):
            safeprint(line)

    elif export_outputs is not None:
        job_id, target_dir = export_outputs
        hc_id = get_job_id(job_id)
        for line in core.export_outputs(
                hc_id, os.path.expanduser(target_dir), exports or 'csv,xml'):
            safeprint(line)

    elif delete_uncompleted_calculations:
        logs.dbcmd('delete_uncompleted_calculations', getpass.getuser())

    else:
        engine.parentparser.prog = 'oq engine'
        engine.parentparser.print_usage()
Example #23
def engine(log_file, no_distribute, yes, config_file, make_html_report,
           upgrade_db, db_version, what_if_I_upgrade, run,
           list_hazard_calculations, list_risk_calculations,
           delete_calculation, delete_uncompleted_calculations,
           hazard_calculation_id, list_outputs, show_log,
           export_output, export_outputs, exports='',
           log_level='info', reuse_hazard=False):
    """
    Run a calculation using the traditional command line API
    """
    if not run:
        # configure basic logging
        logs.init()

    if config_file:
        config.read(os.path.abspath(os.path.expanduser(config_file)),
                    soft_mem_limit=int, hard_mem_limit=int, port=int,
                    multi_user=valid.boolean, multi_node=valid.boolean)

    if no_distribute:
        os.environ['OQ_DISTRIBUTE'] = 'no'

    # check if the datadir exists
    datadir = datastore.get_datadir()
    if not os.path.exists(datadir):
        os.makedirs(datadir)

    dbserver.ensure_on()
    # check if we are talking to the right server
    err = dbserver.check_foreign()
    if err:
        sys.exit(err)

    if upgrade_db:
        msg = logs.dbcmd('what_if_I_upgrade', 'read_scripts')
        if msg.startswith('Your database is already updated'):
            pass
        elif yes or confirm('Proceed? (y/n) '):
            logs.dbcmd('upgrade_db')
        sys.exit(0)

    if db_version:
        safeprint(logs.dbcmd('db_version'))
        sys.exit(0)

    if what_if_I_upgrade:
        safeprint(logs.dbcmd('what_if_I_upgrade', 'extract_upgrade_scripts'))
        sys.exit(0)

    # check if the db is outdated
    outdated = logs.dbcmd('check_outdated')
    if outdated:
        sys.exit(outdated)

    # hazard or hazard+risk
    if hazard_calculation_id == -1:
        # get the latest calculation of the current user
        hc_id = get_job_id(hazard_calculation_id, getpass.getuser())
    elif hazard_calculation_id:
        # make it possible to use calculations made by another user
        hc_id = get_job_id(hazard_calculation_id)
    else:
        hc_id = None
    if run:
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        job_inis = [os.path.expanduser(f) for f in run]
        if len(job_inis) == 1 and not hc_id:
            # init logs before calling get_oqparam
            logs.init('nojob', getattr(logging, log_level.upper()))
            # not using logs.handle that logs on the db
            oq = readinput.get_oqparam(job_inis[0])
            smart_run(job_inis[0], oq, log_level, log_file,
                      exports, reuse_hazard)
            return
        for i, job_ini in enumerate(job_inis):
            open(job_ini, 'rb').read()  # IOError if the file does not exist
            job_id = run_job(job_ini, log_level, log_file,
                             exports, hazard_calculation_id=hc_id)
            if not hc_id:  # use the first calculation as base for the others
                hc_id = job_id
    # hazard
    elif list_hazard_calculations:
        for line in logs.dbcmd(
                'list_calculations', 'hazard', getpass.getuser()):
            safeprint(line)
    elif delete_calculation is not None:
        del_calculation(delete_calculation, yes)
    # risk
    elif list_risk_calculations:
        for line in logs.dbcmd('list_calculations', 'risk', getpass.getuser()):
            safeprint(line)

    # export
    elif make_html_report:
        safeprint('Written %s' % make_report(make_html_report))
        sys.exit(0)

    elif list_outputs is not None:
        hc_id = get_job_id(list_outputs)
        for line in logs.dbcmd('list_outputs', hc_id):
            safeprint(line)
    elif show_log is not None:
        hc_id = get_job_id(show_log)
        for line in logs.dbcmd('get_log', hc_id):
            safeprint(line)

    elif export_output is not None:
        output_id, target_dir = export_output
        dskey, calc_id, datadir = logs.dbcmd('get_output', int(output_id))
        for line in core.export_output(
                dskey, calc_id, datadir, os.path.expanduser(target_dir),
                exports or 'csv,xml'):
            safeprint(line)

    elif export_outputs is not None:
        job_id, target_dir = export_outputs
        hc_id = get_job_id(job_id)
        for line in core.export_outputs(
                hc_id, os.path.expanduser(target_dir), exports or 'csv,xml'):
            safeprint(line)

    elif delete_uncompleted_calculations:
        logs.dbcmd('delete_uncompleted_calculations', getpass.getuser())

    else:
        engine.parentparser.prog = 'oq engine'
        engine.parentparser.print_usage()
Example #24
def engine(log_file,
           no_distribute,
           yes,
           config_file,
           make_html_report,
           upgrade_db,
           db_version,
           what_if_I_upgrade,
           run,
           list_hazard_calculations,
           list_risk_calculations,
           delete_calculation,
           delete_uncompleted_calculations,
           hazard_calculation_id,
           list_outputs,
           show_log,
           export_output,
           export_outputs,
           exports='',
           log_level='info',
           reuse_hazard=False):
    """
    Run a calculation using the traditional command line API
    """
    if not run:
        # configure basic logging
        logs.init()

    if config_file:
        config.read(os.path.abspath(os.path.expanduser(config_file)),
                    soft_mem_limit=int,
                    hard_mem_limit=int,
                    port=int,
                    multi_user=valid.boolean,
                    multi_node=valid.boolean)

    if no_distribute:
        os.environ['OQ_DISTRIBUTE'] = 'no'

    # check if the datadir exists
    datadir = datastore.get_datadir()
    if not os.path.exists(datadir):
        os.makedirs(datadir)

    dbserver.ensure_on()
    # check if we are talking to the right server
    err = dbserver.check_foreign()
    if err:
        sys.exit(err)

    if upgrade_db:
        msg = logs.dbcmd('what_if_I_upgrade', 'read_scripts')
        if msg.startswith('Your database is already updated'):
            pass
        elif yes or confirm('Proceed? (y/n) '):
            logs.dbcmd('upgrade_db')
        sys.exit(0)

    if db_version:
        safeprint(logs.dbcmd('db_version'))
        sys.exit(0)

    if what_if_I_upgrade:
        safeprint(logs.dbcmd('what_if_I_upgrade', 'extract_upgrade_scripts'))
        sys.exit(0)

    # check if the db is outdated
    outdated = logs.dbcmd('check_outdated')
    if outdated:
        sys.exit(outdated)

    # hazard or hazard+risk
    if hazard_calculation_id == -1:
        # get the latest calculation of the current user
        hc_id = get_job_id(hazard_calculation_id, getpass.getuser())
    elif hazard_calculation_id:
        # make it possible to use calculations made by another user
        hc_id = get_job_id(hazard_calculation_id)
    else:
        hc_id = None
    if run:
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        job_inis = [os.path.expanduser(f) for f in run]
        if len(job_inis) == 1 and not hc_id:
            # init logs before calling get_oqparam
            logs.init('nojob', getattr(logging, log_level.upper()))
            # not using logs.handle that logs on the db
            oq = readinput.get_oqparam(job_inis[0])
            smart_run(job_inis[0], oq, log_level, log_file, exports,
                      reuse_hazard)
            return
        for i, job_ini in enumerate(job_inis):
            open(job_ini, 'rb').read()  # IOError if the file does not exist
            job_id = run_job(job_ini,
                             log_level,
                             log_file,
                             exports,
                             hazard_calculation_id=hc_id)
            if not hc_id:  # use the first calculation as base for the others
                hc_id = job_id
    # hazard
    elif list_hazard_calculations:
        for line in logs.dbcmd('list_calculations', 'hazard',
                               getpass.getuser()):
            safeprint(line)
    elif delete_calculation is not None:
        del_calculation(delete_calculation, yes)
    # risk
    elif list_risk_calculations:
        for line in logs.dbcmd('list_calculations', 'risk', getpass.getuser()):
            safeprint(line)

    # export
    elif make_html_report:
        safeprint('Written %s' % make_report(make_html_report))
        sys.exit(0)

    elif list_outputs is not None:
        hc_id = get_job_id(list_outputs)
        for line in logs.dbcmd('list_outputs', hc_id):
            safeprint(line)
    elif show_log is not None:
        hc_id = get_job_id(show_log)
        for line in logs.dbcmd('get_log', hc_id):
            safeprint(line)

    elif export_output is not None:
        output_id, target_dir = export_output
        dskey, calc_id, datadir = logs.dbcmd('get_output', int(output_id))
        for line in core.export_output(dskey, calc_id, datadir,
                                       os.path.expanduser(target_dir), exports
                                       or 'csv,xml'):
            safeprint(line)

    elif export_outputs is not None:
        job_id, target_dir = export_outputs
        hc_id = get_job_id(job_id)
        for line in core.export_outputs(hc_id, os.path.expanduser(target_dir),
                                        exports or 'csv,xml'):
            safeprint(line)

    elif delete_uncompleted_calculations:
        logs.dbcmd('delete_uncompleted_calculations', getpass.getuser())

    else:
        engine.parentparser.prog = 'oq engine'
        engine.parentparser.print_usage()