Example #1
0
 def test_ok(self):
     """A fully specified parser must dispatch the CLI arguments to f."""
     parser = sap.Parser(f)
     parser.arg('a', 'first argument')
     parser.opt('b', 'second argument')
     parser.flg('c', 'third argument')
     parser.opt('d', 'fourth argument')
     argv = '1 -b=2 -d=3'.split()
     expected = ['1', '2', False, '3']  # the flag -c defaults to False
     self.assertEqual(expected, parser.callfunc(argv))
Example #2
0
    def test_long_argument(self):
        # test the replacement '_' -> '-' in variable names
        p = sap.Parser(lambda a_long_argument: None)
        p.opt('a_long_argument', 'a long argument')
        self.assertEqual(p.help(), '''\
usage: %s [-h] [-a A_LONG_ARGUMENT]

optional arguments:
  -h, --help            show this help message and exit
  -a A_LONG_ARGUMENT, --a-long-argument A_LONG_ARGUMENT
                        a long argument
''' % p.parentparser.prog)
Example #3
0
    def test_no_help(self):
        p = sap.Parser(f, help=None)
        p.arg('a', 'first argument')
        p.opt('b', 'second argument')
        self.assertEqual(p.help(), '''\
usage: %s [-b B] a

positional arguments:
  a            first argument

optional arguments:
  -b B, --b B  second argument
''' % p.parentparser.prog)
        # missing argparse specification for 'c'
        with self.assertRaises(NameError):
            p.check_arguments()
Example #4
0
    def test_group(self):
        p = sap.Parser(f)
        p.arg('a', 'first argument')
        p.opt('b', 'second argument')
        p.group('other arguments')
        p.flg('c', 'third argument')
        p.opt('d', 'fourth argument')
        self.assertEqual(p.help(), '''\
usage: %s [-h] [-b B] [-c] [-d 1] a

positional arguments:
  a            first argument

optional arguments:
  -h, --help   show this help message and exit
  -b B, --b B  second argument

other arguments:
  -c, --c      third argument
  -d 1, --d 1  fourth argument
''' % p.parentparser.prog)
Example #5
0
                    queue.put((conn, cmd, args))
        finally:
            listener.close()
            self.thread.join()


def runserver(dbpathport=None, logfile=DATABASE['LOG'], loglevel='WARN'):
    """
    Configure logging, bind/upgrade the database and start the DbServer
    loop (this call blocks).

    :param dbpathport: optional string of the form "dbpath:port";
        if missing, the address comes from config.DBS_ADDRESS
    :param logfile: path of the log file
    :param loglevel: logging level name (e.g. 'WARN', 'INFO')
    """
    logging.basicConfig(level=getattr(logging, loglevel), filename=logfile)
    if dbpathport:  # assume a string of the form "dbpath:port"
        # split on the LAST colon, so a dbpath containing colons
        # (e.g. a Windows path like C:\oq\db.sqlite) still works
        dbpath, port = dbpathport.rsplit(':', 1)
        port = int(port)
        addr = (DATABASE['HOST'], port)
        DATABASE['NAME'] = dbpath
        DATABASE['PORT'] = port
    else:
        addr = config.DBS_ADDRESS

    # create and upgrade the db if needed
    connection.cursor()  # bind the db
    actions.upgrade_db()

    # start the server
    DbServer(addr, config.DBS_AUTHKEY).loop()

# command-line interface: expose runserver through a sap.Parser
parser = sap.Parser(runserver)
parser.arg('dbpathport', 'dbpath:port')
parser.arg('logfile', 'log file')
parser.opt('loglevel', 'WARN or INFO')

if __name__ == '__main__':
    parser.callfunc()  # parse sys.argv and call runserver
Example #6
0
 def test_NameError(self):
     """Declaring a flag not in f's signature must raise a NameError."""
     parser = sap.Parser(f)
     parser.arg('a', 'first argument')
     with self.assertRaises(NameError):
         parser.flg('c', 'third argument')
Example #7
0
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.

from __future__ import print_function
import os
import re
from openquake.commonlib import sap, datastore


def purge(calc_id):
    """
    Remove the given calculation. If calc_id is 0, remove all calculations.

    :param calc_id: numeric calculation ID (0 means everything)
    """
    if not calc_id:
        # raw string (the original '\d' is an invalid escape) and a '$'
        # anchor, so names like 'calc_1.hdf5.bak' are NOT removed
        calc_re = re.compile(r'calc_\d+\.hdf5$')
        for fname in os.listdir(datastore.DATADIR):
            if calc_re.match(fname):
                os.remove(os.path.join(datastore.DATADIR, fname))
                print('Removed %s' % fname)
    else:
        hdf5path = datastore.read(calc_id).hdf5path
        os.remove(hdf5path)
        print('Removed %s' % hdf5path)


# command-line interface: one positional integer argument
parser = sap.Parser(purge)
parser.arg('calc_id', 'calculation ID', type=int)
Example #8
0
from openquake.baselib import general, performance
from openquake.commonlib import sap, datastore
from openquake.commonlib.export import export as export_


# the export is tested in the demos
def export(calc_id, datastore_key, format='csv', export_dir='.'):
    """
    Export an output from the datastore.

    :param calc_id: number of the calculation
    :param datastore_key: datastore key of the output to export
    :param format: comma-separated export formats (default 'csv')
    :param export_dir: destination directory (default the current one)
    """
    logging.basicConfig(level=logging.INFO)
    dstore = datastore.DataStore(calc_id)
    dstore.export_dir = export_dir
    # attach the parent (hazard) datastore, if any
    hc_id = dstore['oqparam'].hazard_calculation_id
    if hc_id:
        dstore.parent = datastore.DataStore(hc_id)
    with performance.Monitor('export', measuremem=True) as mon:
        for fmt in format.split(','):
            exported = export_((datastore_key, fmt), dstore)
            size = sum(os.path.getsize(fname) for fname in exported)
            print('Exported %s in %s' % (general.humansize(size), exported))
    if mon.duration > 1:  # report timing only for slow exports
        print(mon)


# command-line interface for the export function
parser = sap.Parser(export)
parser.arg('calc_id', 'number of the calculation', type=int)
parser.arg('datastore_key', 'datastore key')
parser.arg('format', 'export formats (comma separated)')
parser.arg('export_dir', 'export directory')
Example #9
0
            for line in lines:
                f.write(line)
        print('Extracted %d lines out of %d' % (len(lines), len(all_lines)))
        return
    model, = nrml.read(fname)
    if model.tag.endswith('exposureModel'):
        total = len(model.assets)
        model.assets.nodes = random_filter(model.assets, reduction_factor)
        num_nodes = len(model.assets)
    elif model.tag.endswith('siteModel'):
        total = len(model)
        model.nodes = random_filter(model, reduction_factor)
        num_nodes = len(model)
    elif model.tag.endswith('sourceModel'):
        total = len(model)
        model.nodes = random_filter(model, reduction_factor)
        num_nodes = len(model)
    else:
        raise RuntimeError('Unknown model tag: %s' % model.tag)
    shutil.copy(fname, fname + '.bak')
    print('Copied the original file in %s.bak' % fname)
    with open(fname, 'w') as f:
        nrml.write([model], f)
    print('Extracted %d nodes out of %d' % (num_nodes, total))


# command-line interface for the reduce function
parser = sap.Parser(reduce)
parser.arg('fname', 'path to the model file')
parser.arg('reduction_factor', 'reduction factor in the range 0..1',
           type=valid.probability)
Example #10
0
    :param sites: comma-separated string with the site indices
    """
    # read the hazard data
    haz = datastore.read(calc_id)
    other = datastore.read(other_id) if other_id else None
    oq = haz['oqparam']
    indices = list(map(int, sites.split(',')))
    n_sites = len(haz['sitemesh'])
    if not set(indices) <= set(range(n_sites)):
        invalid = sorted(set(indices) - set(range(n_sites)))
        print('The indices %s are invalid: no graph for them' % invalid)
    valid = sorted(set(range(n_sites)) & set(indices))
    print('Found %d site(s); plotting %d of them' % (n_sites, len(valid)))
    curves_by_rlz, mean_curves = get_hcurves_and_means(haz)
    if other is None:
        single_curve = len(curves_by_rlz) == 1 or not getattr(
            oq, 'individual_curves', True)
        plt = make_figure(valid, oq.imtls, mean_curves,
                          {} if single_curve else curves_by_rlz, 'mean')
    else:
        _, mean1 = get_hcurves_and_means(haz)
        _, mean2 = get_hcurves_and_means(other)
        plt = make_figure(valid, oq.imtls, mean1, {'mean': mean2}, 'reference')
    plt.show()


# command-line interface for the plot function
parser = sap.Parser(plot)
parser.arg('calc_id', 'a computation id', type=int)
parser.arg('other_id', 'optional id of another computation', type=int)
parser.opt('sites', 'comma-separated string with the site indices')
Example #11
0
"""

import time
import json
import urllib
from openquake.commonlib import sap


def main(calc_id, host='localhost', port=8000):
    """
    Poll the engine server and print the log of the given calculation,
    one page at a time, until interrupted.

    :param calc_id: calculation ID
    :param host: hostname of the engine server
    :param port: port of the engine server
    """
    base_url = 'http://%s:%s/v1/calc/' % (host, port)
    start = 0  # index of the first log line still to be printed
    psize = 10  # page size
    try:
        while True:
            url = base_url + '%d/log/%d:%d' % (calc_id, start, start + psize)
            rows = json.load(urllib.urlopen(url))
            for row in rows:
                # single-argument print: valid in both Python 2 and 3
                print(' '.join(row))
            start += len(rows)
            time.sleep(1)
    except KeyboardInterrupt:
        pass  # the user stopped the polling with Ctrl-C
    except Exception:
        # best effort: e.g. server unreachable; exit silently as before,
        # but without swallowing SystemExit like the old bare except did
        pass


if __name__ == '__main__':
    # build the command-line interface and run ``main``
    parser = sap.Parser(main)
    parser.arg('calc_id', 'calculation ID', type=int)
    parser.arg('host', 'hostname of the engine server')
    parser.arg('port', 'port of the engine server')
    parser.callfunc()  # parse sys.argv and call main
Example #12
0
            assetcol = riskinput.build_asset_collection(assets_by_site)
            dic = groupby(assetcol, operator.attrgetter('taxonomy'))
            for taxo, num in dic.items():
                print('taxonomy #%d, %d assets' % (taxo, num))
            print('total assets = %d' % len(assetcol))
    else:
        print("No info for '%s'" % name)


def info(name, filtersources=False, weightsources=False, report=False):
    """
    Give information. You can pass the name of an available calculator,
    a job.ini file, or a zip archive with the input files.

    :param name: calculator name, job.ini file or zip archive
    :param filtersources: if set, filter the source models
    :param weightsources: if set, weight the source models
    :param report: if set, build a report in rst format instead
    """
    logging.basicConfig(level=logging.INFO)
    with PerformanceMonitor('info', measuremem=True) as mon:
        if not report:
            _info(name, filtersources, weightsources)
        else:
            rst = reportwriter.build_report(name, tempfile.gettempdir())
            print('Generated', rst)
    if mon.duration > 1:  # report timing only for slow operations
        print(mon)


# command-line interface for the info function
parser = sap.Parser(info)
parser.arg('name', 'calculator name, job.ini file or zip archive')
parser.flg('filtersources', 'flag to enable filtering of the source models')
parser.flg('weightsources', 'flag to enable weighting of the source models')
parser.flg('report', 'flag to enable building a report in rst format')
Example #13
0
    NRML version. Works by walking all subdirectories.
    WARNING: there is no downgrade!
    """
    for cwd, dirs, files in os.walk(directory):
        for f in files:
            path = os.path.join(cwd, f)
            if f.endswith('.xml'):
                ip = iterparse(path)
                try:
                    fulltag = ip.next()[1].tag
                    xmlns, tag = fulltag.split('}')
                except:  # not a NRML file
                    pass
                if xmlns[1:] == NRML05:  # already upgraded
                    pass
                elif 'nrml/0.4' in xmlns and 'vulnerability' in f:
                    if not dry_run:
                        print('Upgrading', path)
                        try:
                            upgrade_file(path)
                        except Exception as exc:
                            print(exc)
                    else:
                        print('Not upgrading', path)
                ip._file.close()


# command-line interface for the upgrade_nrml function
parser = sap.Parser(upgrade_nrml)
parser.arg('directory', 'directory to consider')
parser.flg('dry_run', 'test the upgrade without replacing the files')
Example #14
0
    dstore = datastore.read(calc_id)
    sitecol = dstore['sitecol']
    csm = dstore['composite_source_model']
    oq = dstore['oqparam']
    fig = p.figure()
    ax = fig.add_subplot(111)
    ax.grid(True)
    tiles = make_tiles(sitecol, oq.sites_per_tile, oq.maximum_distance)
    print 'There are %d tiles' % len(tiles)
    for tile in tiles:
        xs = []
        ys = []
        area = []
        for src in csm.get_sources():
            if src in tile and getattr(src, 'location', None):
                xs.append(src.location.x)
                ys.append(src.location.y)
                radius = src._get_max_rupture_projection_radius()
                r = (tile.maximum_distance[src.tectonic_region_type] +
                     radius) / tile.KM_ONE_DEGREE
                a = numpy.pi * r**2
                area.append(a)
        ax.add_patch(Rectangle(*tile.get_rectangle(), fill=False))
        p.scatter(tile.fix_lons(xs), ys, marker='o', s=area)
        p.scatter(tile.fix_lons(sitecol.lons), sitecol.lats, marker='+')
    p.show()


# command-line interface for the plot_sites function
parser = sap.Parser(plot_sites)
parser.arg('calc_id', 'a computation id', type=int)
Example #15
0
    ax.grid(True)
    ax.set_ylim([0, 1])
    ax.set_xlabel(output_key)
    ax.set_ylabel('PoE')
    ax.plot(losses, poes)
    return plt


def plot_loss(risk_pik, output_key):
    """
    Loss curves plotter. For the moment it is restricted to the
    aggregate curves.

    :param risk_pik: the pathname to a pickled file
    :param output_key: an unique string for the output to plot
    """
    # read the data; pickle files MUST be opened in binary mode
    # (text mode breaks pickle.load on Python 3) and the handle is
    # now closed deterministically via the context manager
    with open(risk_pik, 'rb') as f:
        out = pickle.load(f)  # trusted local file, pickle is acceptable
    if output_key not in out:
        print('key %s not found: availables %s' % (output_key, sorted(out)))
        return
    loss_curve = out[output_key]
    plt = make_figure(output_key, loss_curve['losses'], loss_curve['poes'])
    plt.show()


# command-line interface for the plot_loss function
parser = sap.Parser(plot_loss)
parser.arg('risk_pik', '.pik file containing the result of a computation')
parser.arg('output_key', 'an unique string for the output to plot')
Example #16
0
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.

from __future__ import print_function
from openquake.commonlib import sap, nrml


def tidy(fnames):
    """
    Reformat a NRML file in a canonical form. That also means reducing the
    precision of the floats to a standard value. If the file is invalid,
    a clear error message is shown.

    :param fnames: list of NRML file names
    """
    for fname in fnames:
        try:
            nodes = nrml.read(fname).nodes
        except ValueError as err:
            print(err)
            return
        # keep a backup copy; both handles are closed deterministically
        # (the original left the source file handle open)
        with open(fname) as src, open(fname + '.bak', 'w') as bak:
            bak.write(src.read())
        with open(fname, 'w') as f:
            nrml.write(nodes, f)
        print('Reformatted %s, original left in %s.bak' % (fname, fname))

# command-line interface for the tidy function
parser = sap.Parser(tidy)
parser.arg('fnames', 'NRML file name', nargs='+')
Example #17
0
        if hasattr(obj, 'value'):  # an array
            print(write_csv(io.StringIO(), obj.value))
        else:
            print(obj)
        return
    # print all keys
    oq = OqParam.from_(ds.attrs)
    print(oq.calculation_mode, 'calculation (%r) saved in %s contains:' %
          (oq.description, ds.hdf5path))
    for key in ds:
        print(key, humansize(ds.getsize(key)))

    # this part is experimental and not tested on purpose
    if rlzs and 'curves_by_trt_gsim' in ds:
        min_value = 0.01  # used in rmsep
        curves_by_rlz, mean_curves = combined_curves(ds)
        dists = []
        for rlz in sorted(curves_by_rlz):
            curves = curves_by_rlz[rlz]
            dist = sum(rmsep(mean_curves[imt], curves[imt], min_value)
                       for imt in mean_curves.dtype.fields)
            dists.append((dist, rlz))
        for dist, rlz in sorted(dists):
            print('rlz=%s, rmsep=%s' % (rlz, dist))


# command-line interface for the show function
parser = sap.Parser(show)
parser.arg('calc_id', 'calculation ID', type=int)
parser.arg('key', 'key of the datastore')
parser.flg('rlzs', 'print out the realizations')
Example #18
0
                hc = calc_ids[hc]
            except IndexError:
                raise SystemExit('There are %d old calculations, cannot '
                                 'retrieve the %s' % (len(calc_ids), hc))
        calc = base.calculators(oqparam, monitor)
        monitor.monitor_dir = calc.datastore.calc_dir
        with monitor:
            calc.run(concurrent_tasks=concurrent_tasks, exports=exports,
                     hazard_calculation_id=hc)
    else:  # run hazard + risk
        calc = run2(
            job_inis[0], job_inis[1], concurrent_tasks, exports, monitor)

    logging.info('Total time spent: %s s', monitor.duration)
    logging.info('Memory allocated: %s', general.humansize(monitor.mem))
    monitor.flush()
    print('See the output with hdfview %s/output.hdf5' %
          calc.datastore.calc_dir)
    return calc

# command-line interface for the run function
parser = sap.Parser(run)
parser.arg('job_ini', 'calculation configuration file '
           '(or files, comma-separated)')
parser.opt('concurrent_tasks', 'hint for the number of tasks to spawn',
           type=int)
parser.opt('loglevel', 'logging level',
           choices='debug info warn error critical'.split())
parser.opt('hc', 'previous calculation ID', type=int)
parser.opt('exports', 'export formats as a comma-separated string',
           type=valid.export_formats)
Example #19
0
#  You should have received a copy of the GNU Affero General Public License
#  along with OpenQuake.  If not, see <http://www.gnu.org/licenses/>.

from __future__ import print_function
from openquake.commonlib import sap, datastore


def show_attrs(calc_id, key):
    """
    Show the attributes of a HDF5 dataset in the datastore

    :param calc_id: numeric calculation ID
    :param key: key of the datastore
    """
    store = datastore.DataStore(calc_id)
    try:
        attrs = store[key].attrs
    except KeyError:
        # the key does not exist in this datastore: nothing to show
        print('%r is not in %s' % (key, store))
        return
    if len(attrs) == 0:
        print('%s has no attributes' % key)
    for name, value in attrs.items():
        print(name, value)


# command-line interface for the show_attrs function
parser = sap.Parser(show_attrs)
parser.arg('calc_id', 'calculation ID', type=int)
parser.arg('key', 'key of the datastore')