Code example #1
File: importcalc.py, Project: atifrasheed82/oq-engine
def importcalc(host, calc_id, username, password):
    """
    Import a remote calculation into the local database
    """
    if '/' in host.split('//', 1)[1]:
        sys.exit('Wrong host ending with /%s' % host.rsplit('/', 1)[1])
    calc_url = '/'.join([host, 'v1/calc', str(calc_id)])
    dbserver.ensure_on()
    job = logs.dbcmd('get_job', calc_id)
    if job is not None:
        sys.exit('There is already a job #%d in the local db' % calc_id)

    datadir = datastore.get_datadir()
    session = login(host, username, password)
    status = session.get('%s/status' % calc_url)
    if 'Log in to an existing account' in status.text:
        sys.exit('Could not login')
    json = status.json()
    if json["parent_id"]:
        sys.exit('The job has a parent (#%(parent_id)d) and cannot be '
                 'downloaded' % json)
    resp = session.get('%s/datastore' % calc_url, stream=True)
    assert resp.status_code == 200, resp.status_code
    fname = '%s/calc_%d.hdf5' % (datadir, calc_id)
    down = 0
    with open(fname, 'wb') as f:
        logging.info('%s -> %s', calc_url, fname)
        for chunk in resp.iter_content(CHUNKSIZE):
            f.write(chunk)
            down += len(chunk)
            general.println('Downloaded {:,} bytes'.format(down))
    print()
    with datastore.read(calc_id) as dstore:
        engine.expose_outputs(dstore, json['owner'], json['status'])
    logging.info('Imported calculation %d successfully', calc_id)
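Usage sketch for this variant (host, credentials and calculation ID are placeholders; a running local dbserver and network access to the remote engine are assumed):

# Hypothetical call: downloads calc_1234.hdf5 into the local datadir
# (by default ~/oqdata) and exposes its outputs in the local database.
importcalc('https://oq.example.org', 1234, 'myuser', 'mypassword')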
Code example #2
File: importcalc.py, Project: digitalsatori/oq-engine
def importcalc(calc_id):
    """
    Import a remote calculation into the local database. server, username
    and password must be specified in an openquake.cfg file.
    NB: calc_id can be a local pathname to a datastore not already
    present in the database: in that case it is imported in the db.
    """
    dbserver.ensure_on()
    try:
        calc_id = int(calc_id)
    except ValueError:  # assume calc_id is a pathname
        calc_id, datadir = datastore.extract_calc_id_datadir(calc_id)
        status = 'complete'
        remote = False
    else:
        remote = True
    job = logs.dbcmd('get_job', calc_id)
    if job is not None:
        sys.exit('There is already a job #%d in the local db' % calc_id)
    if remote:
        datadir = datastore.get_datadir()
        webex = WebExtractor(calc_id)
        status = webex.status['status']
        hc_id = webex.oqparam.hazard_calculation_id
        if hc_id:
            sys.exit('The job has a parent (#%d) and cannot be '
                     'downloaded' % hc_id)
        webex.dump('%s/calc_%d.hdf5' % (datadir, calc_id))
        webex.close()
    with datastore.read(calc_id) as dstore:
        engine.expose_outputs(dstore, status=status)
    logging.info('Imported calculation %d successfully', calc_id)
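Since calc_id may be an integer or a pathname in this variant, both call styles below are plausible (values illustrative):

importcalc(42)                         # fetch remote calculation #42 via WebExtractor
importcalc('/tmp/oqdata/calc_7.hdf5')  # register an existing local datastore in the db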
Code example #3
def importcalc(calc_id):
    """
    Import a remote calculation into the local database. server, username
    and password must be specified in an openquake.cfg file.
    NB: calc_id can be a local pathname to a datastore not already
    present in the database: in that case it is imported in the db.
    """
    dbserver.ensure_on()
    try:
        calc_id = int(calc_id)
    except ValueError:  # assume calc_id is a pathname
        remote = False
    else:
        remote = True
        job = logs.dbcmd('get_job', calc_id)
        if job is not None:
            sys.exit('There is already a job #%d in the local db' % calc_id)
    if remote:
        datadir = datastore.get_datadir()
        webex = WebExtractor(calc_id)
        hc_id = webex.oqparam.hazard_calculation_id
        if hc_id:
            sys.exit('The job has a parent (#%d) and cannot be '
                     'downloaded' % hc_id)
        webex.dump('%s/calc_%d.hdf5' % (datadir, calc_id))
        webex.close()
    with datastore.read(calc_id) as dstore:
        engine.expose_outputs(dstore, status='complete')
    logging.info('Imported calculation %s successfully', calc_id)
Code example #4
File: run_tiles.py, Project: talpallikar/oq-engine
def run_tiles(num_tiles, job_ini, poolsize=0):
    """
    Run a hazard calculation by splitting the sites into tiles.
    WARNING: this is experimental and meant only for internal users
    """
    t0 = time.time()
    oq = readinput.get_oqparam(job_ini)
    num_sites = len(readinput.get_mesh(oq))
    task_args = [(job_ini, slc)
                 for slc in general.split_in_slices(num_sites, num_tiles)]
    if poolsize == 0:  # no pool
        Starmap = parallel.Sequential
    elif os.environ.get('OQ_DISTRIBUTE') == 'celery':
        Starmap = parallel.Processmap  # celery plays only with processes
    else:  # multiprocessing plays only with threads
        Starmap = parallel.Threadmap
    parent_child = [None, None]

    def agg(calc_ids, calc_id):
        if not calc_ids:  # first calculation
            parent_child[0] = calc_id
        parent_child[1] = calc_id
        logs.dbcmd('update_parent_child', parent_child)
        logging.warn('Finished calculation %d of %d',
                     len(calc_ids) + 1, num_tiles)
        return calc_ids + [calc_id]

    calc_ids = Starmap(engine.run_tile, task_args, poolsize).reduce(agg, [])
    datadir = datastore.get_datadir()
    for calc_id in calc_ids:
        print(os.path.join(datadir, 'calc_%d.hdf5' % calc_id))
    print('Total calculation time: %.1f h' % ((time.time() - t0) / 3600.))
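A sketch of invoking run_tiles, assuming a valid job.ini file; the tile count and pool size are illustrative:

# Split the sites into 4 tiles; poolsize=0 would run them sequentially.
run_tiles(4, 'job_hazard.ini', poolsize=2)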
Code example #5
File: show.py, Project: talpallikar/oq-engine
def show(what='contents', calc_id=-1, extra=()):
    """
    Show the content of a datastore (by default the last one).
    """
    datadir = datastore.get_datadir()
    if what == 'all':  # show all
        if not os.path.exists(datadir):
            return
        rows = []
        for calc_id in datastore.get_calc_ids(datadir):
            try:
                ds = read(calc_id)
                oq = ds['oqparam']
                cmode, descr = oq.calculation_mode, oq.description
            except:
                # invalid datastore file, or missing calculation_mode
                # and description attributes, perhaps due to a manual kill
                f = os.path.join(datadir, 'calc_%s.hdf5' % calc_id)
                logging.warn('Unreadable datastore %s', f)
                continue
            else:
                rows.append((calc_id, cmode, descr.encode('utf-8')))
        for row in sorted(rows, key=lambda row: row[0]):  # by calc_id
            print('#%d %s: %s' % row)
        return

    ds = read(calc_id)

    # this part is experimental
    if what == 'rlzs' and 'poes' in ds:
        min_value = 0.01  # used in rmsep
        getter = getters.PmapGetter(ds)
        sitecol = ds['sitecol']
        pmaps = getter.get_pmaps(sitecol.sids)
        weights = [rlz.weight for rlz in getter.rlzs]
        mean = stats.compute_pmap_stats(pmaps, [numpy.mean], weights)
        dists = []
        for rlz, pmap in zip(getter.rlzs, pmaps):
            dist = rmsep(mean.array, pmap.array, min_value)
            dists.append((dist, rlz))
        print('Realizations in order of distance from the mean curves')
        for dist, rlz in sorted(dists):
            print('%s: rmsep=%s' % (rlz, dist))
    elif view.keyfunc(what) in view:
        print(view(what, ds))
    elif what.split('/', 1)[0] in extract:
        print(extract(ds, what, *extra))
    elif what in ds:
        obj = ds[what]
        if hasattr(obj, 'value'):  # an array
            print(write_csv(io.BytesIO(), obj.value).decode('utf8'))
        else:
            print(obj)
    else:
        print('%s not found' % what)

    ds.close()
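Typical invocations, with illustrative arguments; what must be a view name, an extract key, or a dataset path present in the datastore:

show()               # print the contents of the latest datastore
show('all')          # summarize every calculation found in the datadir
show('oqparam', 42)  # hypothetical: display the oqparam object of calc #42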
Code example #6
File: show.py, Project: tieganh/oq-engine
def show(what='contents', calc_id=-1, extra=()):
    """
    Show the content of a datastore (by default the last one).
    """
    datadir = datastore.get_datadir()
    if what == 'all':  # show all
        if not os.path.exists(datadir):
            return
        rows = []
        for calc_id in datastore.get_calc_ids(datadir):
            try:
                ds = util.read(calc_id)
                oq = ds['oqparam']
                cmode, descr = oq.calculation_mode, oq.description
            except Exception:
                # invalid datastore file, or missing calculation_mode
                # and description attributes, perhaps due to a manual kill
                f = os.path.join(datadir, 'calc_%s.hdf5' % calc_id)
                logging.warning('Unreadable datastore %s', f)
                continue
            else:
                rows.append((calc_id, cmode, descr.encode('utf-8')))
        for row in sorted(rows, key=lambda row: row[0]):  # by calc_id
            print('#%d %s: %s' % row)
        return

    ds = util.read(calc_id)

    # this part is experimental
    if view.keyfunc(what) in view:
        print(view(what, ds))
    elif what.split('/', 1)[0] in extract:
        obj = extract(ds, what, *extra)
        if hasattr(obj, 'dtype') and obj.dtype.names:
            print(write_csv(io.BytesIO(), obj).decode('utf8'))
        else:
            print(obj)
    elif what in ds:
        obj = ds.getitem(what)
        if hasattr(obj, 'items'):  # is a group of datasets
            print(obj)
        else:  # is a single dataset
            obj.refresh()  # for SWMR mode
            aw = hdf5.ArrayWrapper.from_(obj)
            if hasattr(aw, 'shape_descr'):
                print(rst_table(aw.to_table()))
            else:
                print(write_csv(io.BytesIO(), aw.array).decode('utf8'))
    else:
        print('%s not found' % what)

    ds.close()
Code example #7
def init(calc_id='nojob', level=logging.INFO):
    """
    1. initialize the root logger (if not already initialized)
    2. set the format of the root handlers (if any)
    3. return a new calculation ID candidate if calc_id is 'job' or 'nojob'
       (with 'nojob' the calculation ID is not stored in the database)
    """
    if not logging.root.handlers:  # first time
        logging.basicConfig(level=level)
    if calc_id == 'job':  # produce a calc_id by creating a job in the db
        calc_id = dbcmd('create_job', datastore.get_datadir())
    elif calc_id == 'nojob':  # produce a calc_id without creating a job
        calc_id = datastore.get_last_calc_id() + 1
    else:
        calc_id = int(calc_id)
        path = os.path.join(datastore.get_datadir(), 'calc_%d.hdf5' % calc_id)
        if os.path.exists(path):
            raise OSError('%s already exists' % path)
    fmt = '[%(asctime)s #{} %(levelname)s] %(message)s'.format(calc_id)
    for handler in logging.root.handlers:
        f = logging.Formatter(fmt, datefmt='%Y-%m-%d %H:%M:%S')
        handler.setFormatter(f)
    return calc_id
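A sketch of the three modes; 'job' and 'nojob' are the literal strings the code checks for:

calc_id = logs.init()        # 'nojob': next free ID, nothing stored in the db
calc_id = logs.init('job')   # create a job record in the db and use its ID
calc_id = logs.init(2020)    # hypothetical explicit ID; raises OSError if calc_2020.hdf5 exists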
Code example #8
File: run.py, Project: jamesmarek/oq-engine
def _run(job_inis, concurrent_tasks, calc_id, pdb, loglevel, hc, exports,
         params):
    global calc_path
    assert len(job_inis) in (1, 2), job_inis
    # set the logs first of all
    calc_id = logs.init(calc_id, getattr(logging, loglevel.upper()))
    # disable gzip_input
    base.BaseCalculator.gzip_inputs = lambda self: None
    with performance.Monitor('total runtime', measuremem=True) as monitor:
        if os.environ.get('OQ_DISTRIBUTE') not in ('no', 'processpool'):
            os.environ['OQ_DISTRIBUTE'] = 'processpool'
        if len(job_inis) == 1:  # run hazard or risk
            if hc:
                hc_id = hc[0]
                rlz_ids = hc[1:]
            else:
                hc_id = None
                rlz_ids = ()
            oqparam = readinput.get_oqparam(job_inis[0], hc_id=hc_id)
            if not oqparam.cachedir:  # enable caching
                oqparam.cachedir = datastore.get_datadir()
            if hc_id and hc_id < 0:  # interpret negative calculation ids
                calc_ids = datastore.get_calc_ids()
                try:
                    hc_id = calc_ids[hc_id]
                except IndexError:
                    raise SystemExit('There are %d old calculations, cannot '
                                     'retrieve the %s' %
                                     (len(calc_ids), hc_id))
            calc = base.calculators(oqparam, calc_id)
            calc.run(concurrent_tasks=concurrent_tasks,
                     pdb=pdb,
                     exports=exports,
                     hazard_calculation_id=hc_id,
                     rlz_ids=rlz_ids,
                     **params)
        else:  # run hazard + risk
            calc = run2(job_inis[0], job_inis[1], calc_id, concurrent_tasks,
                        pdb, loglevel, exports, params)

    logging.info('Total time spent: %s s', monitor.duration)
    logging.info('Memory allocated: %s', general.humansize(monitor.mem))
    print('See the output with silx view %s' % calc.datastore.filename)
    calc_path, _ = os.path.splitext(calc.datastore.filename)  # used below
    return calc
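A hazard-only invocation of this low-level driver might look like this (all values illustrative):

calc = _run(['job.ini'], concurrent_tasks=None, calc_id='nojob', pdb=False,
            loglevel='info', hc=None, exports='', params={})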
Code example #9
def run2(job_haz, job_risk, calc_id, concurrent_tasks, pdb, reuse_input,
         loglevel, exports, params):
    """
    Run both hazard and risk, one after the other
    """
    oq = readinput.get_oqparam(job_haz, kw=params)
    hcalc = base.calculators(oq, calc_id)
    hcalc.run(concurrent_tasks=concurrent_tasks, pdb=pdb, exports=exports)
    hcalc.datastore.close()
    hc_id = hcalc.datastore.calc_id
    rcalc_id = logs.init(level=getattr(logging, loglevel.upper()))
    params['hazard_calculation_id'] = str(hc_id)
    oq = readinput.get_oqparam(job_risk, kw=params)
    rcalc = base.calculators(oq, rcalc_id)
    if reuse_input:  # enable caching
        oq.cachedir = datastore.get_datadir()
    rcalc.run(pdb=pdb, exports=exports)
    return rcalc
Code example #10
def init(calc_id='nojob', level=logging.INFO):
    """
    1. initialize the root logger (if not already initialized)
    2. set the format of the root handlers (if any)
    3. return a new calculation ID candidate if calc_id is 'job' or 'nojob'
       (with 'nojob' the calculation ID is not stored in the database)
    """
    if not logging.root.handlers:  # first time
        logging.basicConfig(level=level)
    if calc_id == 'job':  # produce a calc_id by creating a job in the db
        calc_id = dbcmd('create_job', datastore.get_datadir())
    elif calc_id == 'nojob':  # produce a calc_id without creating a job
        calc_id = datastore.get_last_calc_id() + 1
    else:
        assert isinstance(calc_id, int), calc_id
    fmt = '[%(asctime)s #{} %(levelname)s] %(message)s'.format(calc_id)
    for handler in logging.root.handlers:
        handler.setFormatter(logging.Formatter(fmt))
    return calc_id
Code example #11
File: logs.py, Project: digitalsatori/oq-engine
def init(calc_id='nojob', level=logging.INFO):
    """
    1. initialize the root logger (if not already initialized)
    2. set the format of the root handlers (if any)
    3. return a new calculation ID candidate if calc_id is 'job' or 'nojob'
       (with 'nojob' the calculation ID is not stored in the database)
    """
    if not logging.root.handlers:  # first time
        logging.basicConfig(level=level)
    if calc_id == 'job':  # produce a calc_id by creating a job in the db
        calc_id = dbcmd('create_job', datastore.get_datadir())
    elif calc_id == 'nojob':  # produce a calc_id without creating a job
        calc_id = datastore.get_last_calc_id() + 1
    else:
        assert isinstance(calc_id, int), calc_id
    fmt = '[%(asctime)s #{} %(levelname)s] %(message)s'.format(calc_id)
    for handler in logging.root.handlers:
        handler.setFormatter(logging.Formatter(fmt))
    return calc_id
Code example #12
def job_from_file(cfg_file, username, hazard_calculation_id=None):
    """
    Create a full job profile from a job config file.

    :param str cfg_file:
        Path to a job.ini file.
    :param str username:
        The user who will own this job profile and all results
    :param str datadir:
        Data directory of the user
    :param hazard_calculation_id:
        ID of a previous calculation or None
    :returns:
        a pair (job_id, oqparam)
    """
    oq = readinput.get_oqparam(cfg_file, hc_id=hazard_calculation_id)
    job_id = logs.dbcmd('create_job',
                        oq.calculation_mode, oq.description, username,
                        datastore.get_datadir(), hazard_calculation_id)
    return job_id, oq
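Illustrative call, with a placeholder path and username:

# Registers a new job row in the engine database and returns its ID
# together with the parsed job parameters.
job_id, oq = job_from_file('/path/to/job.ini', 'myuser')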
Code example #13
File: engine.py, Project: jbyronar/oq-engine
def main(no_distribute=False,
         yes=False,
         upgrade_db=False,
         db_version=False,
         what_if_I_upgrade=False,
         list_hazard_calculations=False,
         list_risk_calculations=False,
         delete_uncompleted_calculations=False,
         multi=False,
         reuse_input=False,
         *,
         log_file=None,
         make_html_report=None,
         run=None,
         delete_calculation: int = None,
         hazard_calculation_id: int = None,
         list_outputs: int = None,
         show_log=None,
         export_output=None,
         export_outputs=None,
         param='',
         config_file=None,
         exports='',
         log_level='info'):
    """
    Run a calculation using the traditional command line API
    """
    if not run:
        # configure a basic logging
        logs.init()

    if config_file:
        config.read(os.path.abspath(os.path.expanduser(config_file)),
                    soft_mem_limit=int,
                    hard_mem_limit=int,
                    port=int,
                    multi_user=valid.boolean,
                    serialize_jobs=valid.boolean,
                    strict=valid.boolean,
                    code=exec)

    if no_distribute:
        os.environ['OQ_DISTRIBUTE'] = 'no'

    # check if the datadir exists
    datadir = datastore.get_datadir()
    if not os.path.exists(datadir):
        os.makedirs(datadir)

    dbserver.ensure_on()
    # check if we are talking to the right server
    err = dbserver.check_foreign()
    if err:
        sys.exit(err)

    if upgrade_db:
        msg = logs.dbcmd('what_if_I_upgrade', 'read_scripts')
        if msg.startswith('Your database is already updated'):
            pass
        elif yes or confirm('Proceed? (y/n) '):
            logs.dbcmd('upgrade_db')
        sys.exit(0)

    if db_version:
        safeprint(logs.dbcmd('db_version'))
        sys.exit(0)

    if what_if_I_upgrade:
        safeprint(logs.dbcmd('what_if_I_upgrade', 'extract_upgrade_scripts'))
        sys.exit(0)

    # check if the db is outdated
    outdated = logs.dbcmd('check_outdated')
    if outdated:
        sys.exit(outdated)

    # hazard or hazard+risk
    if hazard_calculation_id == -1:
        # get the latest calculation of the current user
        hc_id = get_job_id(hazard_calculation_id, getpass.getuser())
    elif hazard_calculation_id:
        # make it possible to use calculations made by another user
        hc_id = get_job_id(hazard_calculation_id)
    else:
        hc_id = None
    if run:
        pars = dict(p.split('=', 1) for p in param.split(',')) if param else {}
        if reuse_input:
            pars['cachedir'] = datadir
        if hc_id:
            pars['hazard_calculation_id'] = str(hc_id)
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        job_inis = [os.path.expanduser(f) for f in run]
        pars['multi'] = multi
        run_jobs(job_inis, log_level, log_file, exports, **pars)

    # hazard
    elif list_hazard_calculations:
        for line in logs.dbcmd('list_calculations', 'hazard',
                               getpass.getuser()):
            safeprint(line)
    elif delete_calculation is not None:
        del_calculation(delete_calculation, yes)
    # risk
    elif list_risk_calculations:
        for line in logs.dbcmd('list_calculations', 'risk', getpass.getuser()):
            safeprint(line)

    # export
    elif make_html_report:
        safeprint('Written %s' % make_report(make_html_report))
        sys.exit(0)

    elif list_outputs is not None:
        hc_id = get_job_id(list_outputs)
        for line in logs.dbcmd('list_outputs', hc_id):
            safeprint(line)
    elif show_log is not None:
        hc_id = get_job_id(show_log)
        for line in logs.dbcmd('get_log', hc_id):
            safeprint(line)

    elif export_output is not None:
        output_id, target_dir = export_output
        dskey, calc_id, datadir = logs.dbcmd('get_output', int(output_id))
        for line in core.export_output(dskey, calc_id, datadir,
                                       os.path.expanduser(target_dir), exports
                                       or DEFAULT_EXPORTS):
            safeprint(line)

    elif export_outputs is not None:
        job_id, target_dir = export_outputs
        hc_id = get_job_id(job_id)
        for line in core.export_outputs(hc_id, os.path.expanduser(target_dir),
                                        exports or DEFAULT_EXPORTS):
            safeprint(line)

    elif delete_uncompleted_calculations:
        logs.dbcmd('delete_uncompleted_calculations', getpass.getuser())
    else:
        print("Please pass some option, see oq engine --help")
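main backs the traditional command line, so a direct call roughly equivalent to "oq engine --run job.ini --exports csv" might look like this (keyword values illustrative):

main(run=['job.ini'], exports='csv', log_level='info')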
Code example #14
def engine(log_file, no_distribute, yes, config_file, make_html_report,
           upgrade_db, db_version, what_if_I_upgrade,
           run_hazard, run_risk, run,
           list_hazard_calculations, list_risk_calculations,
           delete_calculation, delete_uncompleted_calculations,
           hazard_calculation_id, list_outputs, show_log,
           export_output, export_outputs, exports='',
           log_level='info'):
    """
    Run a calculation using the traditional command line API
    """
    if run or run_hazard or run_risk:
        # the logging will be configured in engine.py
        pass
    else:
        # configure a basic logging
        logging.basicConfig(level=logging.INFO)

    if config_file:
        config.load(os.path.abspath(os.path.expanduser(config_file)))

    if no_distribute:
        os.environ['OQ_DISTRIBUTE'] = 'no'

    # check if the datadir exists
    datadir = datastore.get_datadir()
    if not os.path.exists(datadir):
        os.makedirs(datadir)

    dbserver.ensure_on()
    # check if we are talking to the right server
    err = dbserver.check_foreign()
    if err:
        sys.exit(err)

    if upgrade_db:
        logs.set_level('info')
        msg = logs.dbcmd('what_if_I_upgrade', 'read_scripts')
        if msg.startswith('Your database is already updated'):
            pass
        elif yes or confirm('Proceed? (y/n) '):
            logs.dbcmd('upgrade_db')
        sys.exit(0)

    if db_version:
        safeprint(logs.dbcmd('db_version'))
        sys.exit(0)

    if what_if_I_upgrade:
        safeprint(logs.dbcmd('what_if_I_upgrade', 'extract_upgrade_scripts'))
        sys.exit(0)

    # check if the db is outdated
    outdated = logs.dbcmd('check_outdated')
    if outdated:
        sys.exit(outdated)

    # hazard or hazard+risk
    if hazard_calculation_id:
        hc_id = get_job_id(hazard_calculation_id)
    else:
        hc_id = None
    if run:
        job_ini = os.path.expanduser(run)
        open(job_ini, 'rb').read()  # IOError if the file does not exist
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        run_job(os.path.expanduser(run), log_level, log_file,
                exports, hazard_calculation_id=hc_id)
    # hazard
    elif list_hazard_calculations:
        for line in logs.dbcmd(
                'list_calculations', 'hazard', getpass.getuser()):
            safeprint(line)
    elif run_hazard is not None:
        safeprint('WARN: --rh/--run-hazard are deprecated, use --run instead',
                  file=sys.stderr)
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        run_job(os.path.expanduser(run_hazard), log_level,
                log_file, exports)
    elif delete_calculation is not None:
        del_calculation(delete_calculation, yes)
    # risk
    elif list_risk_calculations:
        for line in logs.dbcmd('list_calculations', 'risk', getpass.getuser()):
            safeprint(line)
    elif run_risk is not None:
        safeprint('WARN: --rr/--run-risk are deprecated, use --run instead',
                  file=sys.stderr)
        if hazard_calculation_id is None:
            sys.exit(MISSING_HAZARD_MSG)
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        run_job(
            os.path.expanduser(run_risk),
            log_level, log_file, exports,
            hazard_calculation_id=hc_id)

    # export
    elif make_html_report:
        safeprint('Written %s' % make_report(make_html_report))
        sys.exit(0)

    elif list_outputs is not None:
        hc_id = get_job_id(list_outputs)
        for line in logs.dbcmd('list_outputs', hc_id):
            safeprint(line)
    elif show_log is not None:
        hc_id = get_job_id(show_log)
        for line in logs.dbcmd('get_log', hc_id):
            safeprint(line)

    elif export_output is not None:
        output_id, target_dir = export_output
        dskey, calc_id, datadir = logs.dbcmd('get_output', int(output_id))
        for line in core.export_output(
                dskey, calc_id, datadir, os.path.expanduser(target_dir),
                exports or 'csv,xml'):
            safeprint(line)

    elif export_outputs is not None:
        job_id, target_dir = export_outputs
        hc_id = get_job_id(job_id)
        for line in core.export_outputs(
                hc_id, os.path.expanduser(target_dir), exports or 'csv,xml'):
            safeprint(line)

    elif delete_uncompleted_calculations:
        logs.dbcmd('delete_uncompleted_calculations', getpass.getuser())

    else:
        engine.parentparser.prog = 'oq engine'
        engine.parentparser.print_usage()
Code example #15
File: engine.py, Project: gem/oq-engine
def engine(log_file, no_distribute, yes, config_file, make_html_report,
           upgrade_db, db_version, what_if_I_upgrade, run,
           list_hazard_calculations, list_risk_calculations,
           delete_calculation, delete_uncompleted_calculations,
           hazard_calculation_id, list_outputs, show_log,
           export_output, export_outputs, exports='',
           log_level='info', reuse_hazard=False):
    """
    Run a calculation using the traditional command line API
    """
    if not run:
        # configure a basic logging
        logs.init()

    if config_file:
        config.read(os.path.abspath(os.path.expanduser(config_file)),
                    soft_mem_limit=int, hard_mem_limit=int, port=int,
                    multi_user=valid.boolean, multi_node=valid.boolean)

    if no_distribute:
        os.environ['OQ_DISTRIBUTE'] = 'no'

    # check if the datadir exists
    datadir = datastore.get_datadir()
    if not os.path.exists(datadir):
        os.makedirs(datadir)

    dbserver.ensure_on()
    # check if we are talking to the right server
    err = dbserver.check_foreign()
    if err:
        sys.exit(err)

    if upgrade_db:
        msg = logs.dbcmd('what_if_I_upgrade', 'read_scripts')
        if msg.startswith('Your database is already updated'):
            pass
        elif yes or confirm('Proceed? (y/n) '):
            logs.dbcmd('upgrade_db')
        sys.exit(0)

    if db_version:
        safeprint(logs.dbcmd('db_version'))
        sys.exit(0)

    if what_if_I_upgrade:
        safeprint(logs.dbcmd('what_if_I_upgrade', 'extract_upgrade_scripts'))
        sys.exit(0)

    # check if the db is outdated
    outdated = logs.dbcmd('check_outdated')
    if outdated:
        sys.exit(outdated)

    # hazard or hazard+risk
    if hazard_calculation_id == -1:
        # get the latest calculation of the current user
        hc_id = get_job_id(hazard_calculation_id, getpass.getuser())
    elif hazard_calculation_id:
        # make it possible to use calculations made by another user
        hc_id = get_job_id(hazard_calculation_id)
    else:
        hc_id = None
    if run:
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        job_inis = [os.path.expanduser(f) for f in run]
        if len(job_inis) == 1 and not hc_id:
            # init logs before calling get_oqparam
            logs.init('nojob', getattr(logging, log_level.upper()))
            # not using logs.handle that logs on the db
            oq = readinput.get_oqparam(job_inis[0])
            smart_run(job_inis[0], oq, log_level, log_file,
                      exports, reuse_hazard)
            return
        for i, job_ini in enumerate(job_inis):
            open(job_ini, 'rb').read()  # IOError if the file does not exist
            job_id = run_job(job_ini, log_level, log_file,
                             exports, hazard_calculation_id=hc_id)
            if not hc_id:  # use the first calculation as base for the others
                hc_id = job_id
    # hazard
    elif list_hazard_calculations:
        for line in logs.dbcmd(
                'list_calculations', 'hazard', getpass.getuser()):
            safeprint(line)
    elif delete_calculation is not None:
        del_calculation(delete_calculation, yes)
    # risk
    elif list_risk_calculations:
        for line in logs.dbcmd('list_calculations', 'risk', getpass.getuser()):
            safeprint(line)

    # export
    elif make_html_report:
        safeprint('Written %s' % make_report(make_html_report))
        sys.exit(0)

    elif list_outputs is not None:
        hc_id = get_job_id(list_outputs)
        for line in logs.dbcmd('list_outputs', hc_id):
            safeprint(line)
    elif show_log is not None:
        hc_id = get_job_id(show_log)
        for line in logs.dbcmd('get_log', hc_id):
            safeprint(line)

    elif export_output is not None:
        output_id, target_dir = export_output
        dskey, calc_id, datadir = logs.dbcmd('get_output', int(output_id))
        for line in core.export_output(
                dskey, calc_id, datadir, os.path.expanduser(target_dir),
                exports or 'csv,xml'):
            safeprint(line)

    elif export_outputs is not None:
        job_id, target_dir = export_outputs
        hc_id = get_job_id(job_id)
        for line in core.export_outputs(
                hc_id, os.path.expanduser(target_dir), exports or 'csv,xml'):
            safeprint(line)

    elif delete_uncompleted_calculations:
        logs.dbcmd('delete_uncompleted_calculations', getpass.getuser())

    else:
        engine.parentparser.prog = 'oq engine'
        engine.parentparser.print_usage()
Code example #16
File: engine.py, Project: atifrasheed82/oq-engine
def engine(log_file,
           no_distribute,
           yes,
           config_file,
           make_html_report,
           upgrade_db,
           db_version,
           what_if_I_upgrade,
           run,
           list_hazard_calculations,
           list_risk_calculations,
           delete_calculation,
           delete_uncompleted_calculations,
           hazard_calculation_id,
           list_outputs,
           show_log,
           export_output,
           export_outputs,
           exports='',
           log_level='info',
           reuse_hazard=False):
    """
    Run a calculation using the traditional command line API
    """
    if not run:
        # configure a basic logging
        logs.init()

    if config_file:
        config.read(os.path.abspath(os.path.expanduser(config_file)),
                    soft_mem_limit=int,
                    hard_mem_limit=int,
                    port=int,
                    multi_user=valid.boolean,
                    multi_node=valid.boolean)

    if no_distribute:
        os.environ['OQ_DISTRIBUTE'] = 'no'

    # check if the datadir exists
    datadir = datastore.get_datadir()
    if not os.path.exists(datadir):
        os.makedirs(datadir)

    dbserver.ensure_on()
    # check if we are talking to the right server
    err = dbserver.check_foreign()
    if err:
        sys.exit(err)

    if upgrade_db:
        msg = logs.dbcmd('what_if_I_upgrade', 'read_scripts')
        if msg.startswith('Your database is already updated'):
            pass
        elif yes or confirm('Proceed? (y/n) '):
            logs.dbcmd('upgrade_db')
        sys.exit(0)

    if db_version:
        safeprint(logs.dbcmd('db_version'))
        sys.exit(0)

    if what_if_I_upgrade:
        safeprint(logs.dbcmd('what_if_I_upgrade', 'extract_upgrade_scripts'))
        sys.exit(0)

    # check if the db is outdated
    outdated = logs.dbcmd('check_outdated')
    if outdated:
        sys.exit(outdated)

    # hazard or hazard+risk
    if hazard_calculation_id == -1:
        # get the latest calculation of the current user
        hc_id = get_job_id(hazard_calculation_id, getpass.getuser())
    elif hazard_calculation_id:
        # make it possible to use calculations made by another user
        hc_id = get_job_id(hazard_calculation_id)
    else:
        hc_id = None
    if run:
        log_file = os.path.expanduser(log_file) \
            if log_file is not None else None
        job_inis = [os.path.expanduser(f) for f in run]
        if len(job_inis) == 1 and not hc_id:
            # init logs before calling get_oqparam
            logs.init('nojob', getattr(logging, log_level.upper()))
            # not using logs.handle that logs on the db
            oq = readinput.get_oqparam(job_inis[0])
            smart_run(job_inis[0], oq, log_level, log_file, exports,
                      reuse_hazard)
            return
        for i, job_ini in enumerate(job_inis):
            open(job_ini, 'rb').read()  # IOError if the file does not exist
            job_id = run_job(job_ini,
                             log_level,
                             log_file,
                             exports,
                             hazard_calculation_id=hc_id)
            if not hc_id:  # use the first calculation as base for the others
                hc_id = job_id
    # hazard
    elif list_hazard_calculations:
        for line in logs.dbcmd('list_calculations', 'hazard',
                               getpass.getuser()):
            safeprint(line)
    elif delete_calculation is not None:
        del_calculation(delete_calculation, yes)
    # risk
    elif list_risk_calculations:
        for line in logs.dbcmd('list_calculations', 'risk', getpass.getuser()):
            safeprint(line)

    # export
    elif make_html_report:
        safeprint('Written %s' % make_report(make_html_report))
        sys.exit(0)

    elif list_outputs is not None:
        hc_id = get_job_id(list_outputs)
        for line in logs.dbcmd('list_outputs', hc_id):
            safeprint(line)
    elif show_log is not None:
        hc_id = get_job_id(show_log)
        for line in logs.dbcmd('get_log', hc_id):
            safeprint(line)

    elif export_output is not None:
        output_id, target_dir = export_output
        dskey, calc_id, datadir = logs.dbcmd('get_output', int(output_id))
        for line in core.export_output(dskey, calc_id, datadir,
                                       os.path.expanduser(target_dir), exports
                                       or 'csv,xml'):
            safeprint(line)

    elif export_outputs is not None:
        job_id, target_dir = export_outputs
        hc_id = get_job_id(job_id)
        for line in core.export_outputs(hc_id, os.path.expanduser(target_dir),
                                        exports or 'csv,xml'):
            safeprint(line)

    elif delete_uncompleted_calculations:
        logs.dbcmd('delete_uncompleted_calculations', getpass.getuser())

    else:
        engine.parentparser.prog = 'oq engine'
        engine.parentparser.print_usage()
Code example #17
File: purge.py, Project: khosak/oq-engine
import os
import re
import getpass
from openquake.baselib import sap, datastore
from openquake.commonlib.logs import dbcmd

datadir = datastore.get_datadir()


def purge_one(calc_id, user, force):
    """
    Remove one calculation ID from the database and remove its datastore
    """
    dbcmd('del_calc', calc_id, user, force)
    f1 = os.path.join(datadir, 'calc_%s.hdf5' % calc_id)
    f2 = os.path.join(datadir, 'calc_%s_tmp.hdf5' % calc_id)
    for f in [f1, f2]:
        if os.path.exists(f):  # not removed yet
            os.remove(f)
            print('Removed %s' % f)
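A direct call might look like this (the ID is illustrative; getpass.getuser() mirrors how the surrounding module would obtain the user):

purge_one(42, getpass.getuser(), force=False)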

Code example #18
File: tot_ruptures.py, Project: tieganh/oq-engine
"""
Determine the total number of ruptures in all the calculations in oqdata
"""
import glob
from openquake.baselib.datastore import get_datadir, read
from openquake.calculators.views import rst_table


def main(datadir):
    lst = []
    for fname in glob.glob(datadir + '/calc_*.hdf5'):
        try:
            dstore = read(fname)
        except OSError:  # already open
            continue
        with dstore:
            try:
                descr = dstore['oqparam'].description
            except (KeyError, AttributeError):  # not a calculation
                continue
            try:
                tot_ruptures = dstore['csm_info/sg_data']['totrup'].sum()
            except KeyError:
                tot_ruptures = 0
            else:
                lst.append((descr, tot_ruptures))
    print(rst_table(lst, ['calculation', 'total number of ruptures']))


if __name__ == '__main__':
    main(get_datadir())
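The __main__ block runs over the default datadir, but main can be pointed at any directory containing calc_*.hdf5 files (path illustrative):

main('/backup/oqdata')  # hypothetical alternative datadir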
Code example #19
File: settings.py, Project: digitalsatori/oq-engine
# A server name can be specified to customize the WebUI in case of
# multiple installations of the Engine are available. This helps avoiding
# confusion between different installations when the WebUI is used
SERVER_NAME = socket.gethostname()

# Expose the WebUI interface, otherwise only the REST API will be available
WEBUI = True

# OpenQuake Standalone tools (IPT, Taxtweb, Taxonomy Glossary)
if STANDALONE and WEBUI:
    INSTALLED_APPS += (
        'openquakeplatform',
    )

    INSTALLED_APPS += STANDALONE_APPS

    FILE_PATH_FIELD_DIRECTORY = datastore.get_datadir()

    CONTEXT_PROCESSORS = TEMPLATES[0]['OPTIONS']['context_processors']
    CONTEXT_PROCESSORS.insert(0, 'django.template.context_processors.request')
    CONTEXT_PROCESSORS.append('openquakeplatform.utils.oq_context_processor')

try:
    # Try to load a local_settings.py from the current folder; this is useful
    # when packages are used. A custom local_settings.py can be placed in
    # /usr/share/openquake/engine, avoiding changes inside the python package
    from local_settings import *
except ImportError:
    # If no local_settings.py is availble in the current folder let's try to
    # load it from openquake/server/local_settings.py
    try:
        from openquake.server.local_settings import *
    except ImportError:
        # no local_settings available anywhere; keep the default settings
        pass
Code example #20
}

FILE_UPLOAD_MAX_MEMORY_SIZE = 1

# A server name can be specified to customize the WebUI in case of
# multiple installations of the Engine are available. This helps avoiding
# confusion between different installations when the WebUI is used
SERVER_NAME = socket.gethostname()

# OpenQuake Standalone tools (IPT, Taxtweb, Taxonomy Glossary)
if STANDALONE:
    INSTALLED_APPS += ('openquakeplatform', )

    INSTALLED_APPS += STANDALONE_APPS

    FILE_PATH_FIELD_DIRECTORY = datastore.get_datadir()

    CONTEXT_PROCESSORS = TEMPLATES[0]['OPTIONS']['context_processors']
    CONTEXT_PROCESSORS.insert(0, 'django.template.context_processors.request')
    CONTEXT_PROCESSORS.append('openquakeplatform.utils.oq_context_processor')

try:
    # Try to load a local_settings.py from the current folder; this is useful
    # when packages are used. A custom local_settings.py can be placed in
    # /usr/share/openquake/engine, avoiding changes inside the python package
    from local_settings import *
except ImportError:
    # If no local_settings.py is availble in the current folder let's try to
    # load it from openquake/server/local_settings.py
    try:
        from openquake.server.local_settings import *
    except ImportError:
        # no local_settings available anywhere; keep the default settings
        pass
Code example #21
File: purge.py, Project: digitalsatori/oq-engine
import os
import re
import getpass
import shutil
from openquake.baselib import sap, datastore
from openquake.commonlib.logs import dbcmd

datadir = datastore.get_datadir()


def purge_one(calc_id, user):
    """
    Remove one calculation ID from the database and remove its datastore
    """
    filename = os.path.join(datadir, 'calc_%s.hdf5' % calc_id)
    err = dbcmd('del_calc', calc_id, user)
    if err:
        print(err)
    elif os.path.exists(filename):  # not removed yet
        os.remove(filename)
        print('Removed %s' % filename)