Example #1
def zip_source_model(ssmLT, archive_zip='', log=logging.info):
    """
    Zip the source model files starting from the ssmLT.xml file
    """
    basedir = os.path.dirname(ssmLT)
    if os.path.basename(ssmLT) != 'ssmLT.xml':
        orig = ssmLT
        ssmLT = os.path.join(basedir, 'ssmLT.xml')
        with open(ssmLT, 'wb') as f:
            f.write(open(orig, 'rb').read())

    archive_zip = archive_zip or os.path.join(basedir, 'ssmLT.zip')
    if os.path.exists(archive_zip):
        sys.exit('%s exists already' % archive_zip)
    oq = mock.Mock(inputs={'source_model_logic_tree': ssmLT})
    checksum = readinput.get_checksum32(oq)
    checkfile = os.path.join(os.path.dirname(ssmLT), 'CHECKSUM.txt')
    with open(checkfile, 'w') as f:
        f.write(str(checksum))
    files = logictree.collect_info(ssmLT).smpaths + [
        os.path.abspath(ssmLT),
        os.path.abspath(checkfile)
    ]
    general.zipfiles(files, archive_zip, log=log, cleanup=True)
    return archive_zip
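A minimal usage sketch (the directory below is a placeholder, not from the snippet): point the function at a logic tree file and it returns the path of the archive it created.

# hypothetical invocation; /tmp/demo/ssmLT.xml is a placeholder path
archive = zip_source_model('/tmp/demo/ssmLT.xml')
print('Created', archive)  # /tmp/demo/ssmLT.zip unless archive_zip was passed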
Example #2
def zip_source_model(ssmLT, archive_zip='', log=logging.info):
    """
    Zip the source model files starting from the ssmLT.xml file
    """
    basedir = os.path.dirname(ssmLT)
    if os.path.basename(ssmLT) != 'ssmLT.xml':
        orig = ssmLT
        ssmLT = os.path.join(basedir, 'ssmLT.xml')
        with open(ssmLT, 'wb') as f:
            f.write(open(orig, 'rb').read())

    archive_zip = archive_zip or os.path.join(basedir, 'ssmLT.zip')
    if os.path.exists(archive_zip):
        sys.exit('%s exists already' % archive_zip)
    info = logictree.collect_info(ssmLT)
    files = info.h5paths + info.smpaths
    oq = mock.Mock(inputs={'source_model_logic_tree': ssmLT},
                   random_seed=42,
                   number_of_logic_tree_samples=0,
                   sampling_method='early_weights')
    oq._input_files = readinput.get_input_files(oq)
    checksum = readinput.get_checksum32(oq)
    checkfile = os.path.join(os.path.dirname(ssmLT), 'CHECKSUM.txt')
    with open(checkfile, 'w') as f:
        f.write(str(checksum))
    files.extend([os.path.abspath(ssmLT), os.path.abspath(checkfile)])
    general.zipfiles(files, archive_zip, log=log, cleanup=True)
    return archive_zip
Example #3
def reduce_source_model(smlt_file, source_ids, remove=True):
    """
    Extract sources from the composite source model.

    :param smlt_file: path to a source model logic tree file
    :param source_ids: dictionary source_id -> records (src_id, code)
    :param remove: if True, remove sm.xml files containing no sources
    :returns: the number of sources satisfying the filter vs the total
    """
    total = good = 0
    to_remove = set()
    paths = logictree.collect_info(smlt_file).smpaths
    for dic in parallel.Starmap.apply(reduce_sm, (paths, source_ids)):
        path = dic['path']
        model = dic['model']
        good += dic['good']
        total += dic['total']
        shutil.copy(path, path + '.bak')
        if model:
            with open(path, 'wb') as f:
                nrml.write([model], f, xmlns=dic['xmlns'])
        elif remove:  # remove the files completely reduced
            to_remove.add(path)
    if good:
        for path in to_remove:
            os.remove(path)
    parallel.Starmap.shutdown()
    return good, total
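A hedged usage sketch for this variant: the file name and source ID are placeholders, and `records` stands for the (src_id, code) record array mentioned in the docstring.

records = ...  # placeholder: the (src_id, code) records for source 'ASM01'
good, total = reduce_source_model('ssmLT.xml', {'ASM01': records}, remove=True)
print('kept %d of %d sources' % (good, total))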
Example #4
def checksum(thing):
    """
    Get the checksum of a calculation from the calculation ID (if already
    done) or from the job.ini/job.zip file (if not done yet). If `thing`
    is a source model logic tree file, get the checksum of the model by
    ignoring the job.ini, the gmpe logic tree file and possibly other files.
    """
    try:
        job_id = int(thing)
        job_file = None
    except ValueError:
        job_id = None
        job_file = thing
        if not os.path.exists(job_file):
            sys.exit('%s does not correspond to an existing file' % job_file)
    if job_id:
        dstore = datastore.read(job_id)
        checksum = dstore['/'].attrs['checksum32']
    elif job_file.endswith('.xml'):  # assume it is a smlt file
        inputs = {p: p for p in logictree.collect_info(job_file).smpaths}
        checksum = readinput.get_checksum32(inputs)
    else:
        oq = readinput.get_oqparam(job_file)
        checksum = readinput.get_checksum32(oq.inputs)
    print(checksum)
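A usage sketch; the job ID and file names below are placeholders.

checksum(42)           # checksum stored in an existing calculation
checksum('job.ini')    # checksum computed from the job configuration
checksum('ssmLT.xml')  # checksum of the source model only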
Example #5
def reduce_source_model(smlt_file, source_ids, remove=True):
    """
    Extract sources from the composite source model
    """
    for path in logictree.collect_info(smlt_file).smpaths:
        root = nrml.read(path)
        model = Node('sourceModel', root[0].attrib)
        origmodel = root[0]
        if root['xmlns'] == 'http://openquake.org/xmlns/nrml/0.4':
            for src_node in origmodel:
                if src_node['id'] in source_ids:
                    model.nodes.append(src_node)
        else:  # nrml/0.5
            for src_group in origmodel:
                sg = copy.copy(src_group)
                sg.nodes = []
                weights = src_group.get('srcs_weights')
                if weights:
                    assert len(weights) == len(src_group.nodes)
                else:
                    weights = [1] * len(src_group.nodes)
                src_group['srcs_weights'] = reduced_weights = []
                for src_node, weight in zip(src_group, weights):
                    if src_node['id'] in source_ids:
                        sg.nodes.append(src_node)
                        reduced_weights.append(weight)
                if sg.nodes:
                    model.nodes.append(sg)
        shutil.copy(path, path + '.bak')
        if model:
            with open(path, 'wb') as f:
                nrml.write([model], f, xmlns=root['xmlns'])
                logging.warning('Reduced %s', path)
        elif remove:  # remove the files completely reduced
            os.remove(path)
Example #6
def get_input_files(oqparam):
    """
    :param oqparam: an OqParam instance
    :returns: input path names in a specific order
    """
    fnames = set()  # files entering the checksum
    uri = oqparam.shakemap_uri
    if isinstance(uri, dict) and uri:
        # local files
        for key, val in uri.items():
            if key == 'fname' or key.endswith('_url'):
                val = val.replace('file://', '')
                fname = os.path.join(oqparam.base_path, val)
                if os.path.exists(fname):
                    uri[key] = fname
                    fnames.add(fname)
        # additional separate shapefiles
        if uri['kind'] == 'shapefile' and not uri['fname'].endswith('.zip'):
            fnames.update(get_shapefiles(os.path.dirname(fname)))

    for key in oqparam.inputs:
        fname = oqparam.inputs[key]
        # collect .hdf5 tables for the GSIMs, if any
        if key == 'gsim_logic_tree':
            fnames.update(gsim_lt.collect_files(fname))
            fnames.add(fname)
        elif key == 'source_model':  # UCERF
            f = oqparam.inputs['source_model']
            fnames.add(f)
            fname = nrml.read(f).sourceModel.UCERFSource['filename']
            fnames.add(os.path.join(os.path.dirname(f), fname))
        elif key == 'exposure':  # fname is a list
            for exp in asset.Exposure.read_headers(fname):
                fnames.update(exp.datafiles)
            fnames.update(fname)
        elif isinstance(fname, dict):
            for key, val in fname.items():
                if isinstance(val, list):  # list of files
                    fnames.update(val)
                else:
                    fnames.add(val)
        elif isinstance(fname, list):
            for f in fname:
                if f == oqparam.input_dir:
                    raise InvalidFile('%s: there is an empty path in %s' %
                                      (oqparam.inputs['job_ini'], key))
            fnames.update(fname)
        elif key == 'source_model_logic_tree':
            info = logictree.collect_info(fname)
            fnames.update(info.smpaths)
            fnames.update(info.h5paths)
            fnames.add(fname)
        else:
            fnames.add(fname)
    return sorted(fnames)
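A minimal sketch, assuming a valid job.ini in the working directory (placeholder path) and the same `readinput` module used elsewhere in these examples:

# hypothetical: list every file entering the checksum of a calculation
oq = readinput.get_oqparam('job.ini')
for path in get_input_files(oq):
    print(path)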
Example #7
def reduce(fname, reduction_factor):
    """
    Produce a submodel from `fname` by sampling the nodes randomly.
    Supports source models, site models and exposure models. As a special
    case, it is also able to reduce .csv files by sampling the lines.
    This is a debugging utility to reduce large computations to small ones.
    """
    if fname.endswith('.csv'):
        with open(fname) as f:
            line = f.readline()  # read the first line
            if csv.Sniffer().has_header(line):
                header = line
                all_lines = f.readlines()
            else:
                header = None
                f.seek(0)
                all_lines = f.readlines()
        lines = general.random_filter(all_lines, reduction_factor)
        shutil.copy(fname, fname + '.bak')
        print('Copied the original file to %s.bak' % fname)
        _save_csv(fname, lines, header)
        print('Extracted %d lines out of %d' % (len(lines), len(all_lines)))
        return
    elif fname.endswith('.npy'):
        array = numpy.load(fname)
        shutil.copy(fname, fname + '.bak')
        print('Copied the original file to %s.bak' % fname)
        arr = numpy.array(general.random_filter(array, reduction_factor))
        numpy.save(fname, arr)
        print('Extracted %d rows out of %d' % (len(arr), len(array)))
        return
    node = nrml.read(fname)
    model = node[0]
    if model.tag.endswith('exposureModel'):
        total = len(model.assets)
        model.assets.nodes = general.random_filter(
            model.assets, reduction_factor)
        num_nodes = len(model.assets)
    elif model.tag.endswith('siteModel'):
        total = len(model)
        model.nodes = general.random_filter(model, reduction_factor)
        num_nodes = len(model)
    elif model.tag.endswith('sourceModel'):
        reduce_source_model(fname, reduction_factor)
        return
    elif model.tag.endswith('logicTree'):
        for smpath in logictree.collect_info(fname).smpaths:
            reduce_source_model(smpath, reduction_factor)
        return
    else:
        raise RuntimeError('Unknown model tag: %s' % model.tag)
    save_bak(fname, node, num_nodes, total)
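A hedged usage example with placeholder file names; as the snippet shows, the original file is backed up to a .bak copy before being overwritten with the sampled version.

reduce('exposure.xml', 0.1)   # keep roughly 10% of the assets
reduce('sites.csv', 0.01)     # keep roughly 1% of the rows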
Example #8
def reduce(fname, reduction_factor):
    """
    Produce a submodel from `fname` by sampling the nodes randomly.
    Supports source models, site models and exposure models. As a special
    case, it is also able to reduce .csv files by sampling the lines.
    This is a debugging utility to reduce large computations to small ones.
    """
    if fname.endswith('.csv'):
        with open(fname) as f:
            line = f.readline()  # read the first line
            if csv.Sniffer().has_header(line):
                header = line
                all_lines = f.readlines()
            else:
                header = None
                f.seek(0)
                all_lines = f.readlines()
        lines = general.random_filter(all_lines, reduction_factor)
        shutil.copy(fname, fname + '.bak')
        print('Copied the original file to %s.bak' % fname)
        _save_csv(fname, lines, header)
        print('Extracted %d lines out of %d' % (len(lines), len(all_lines)))
        return
    elif fname.endswith('.npy'):
        array = numpy.load(fname)
        shutil.copy(fname, fname + '.bak')
        print('Copied the original file to %s.bak' % fname)
        arr = numpy.array(general.random_filter(array, reduction_factor))
        numpy.save(fname, arr)
        print('Extracted %d rows out of %d' % (len(arr), len(array)))
        return
    node = nrml.read(fname)
    model = node[0]
    if model.tag.endswith('exposureModel'):
        total = len(model.assets)
        model.assets.nodes = general.random_filter(model.assets,
                                                   reduction_factor)
        num_nodes = len(model.assets)
    elif model.tag.endswith('siteModel'):
        total = len(model)
        model.nodes = general.random_filter(model, reduction_factor)
        num_nodes = len(model)
    elif model.tag.endswith('sourceModel'):
        reduce_source_model(fname, reduction_factor)
        return
    elif model.tag.endswith('logicTree'):
        for smpath in logictree.collect_info(fname).smpaths:
            reduce_source_model(smpath, reduction_factor)
        return
    else:
        raise RuntimeError('Unknown model tag: %s' % model.tag)
    save_bak(fname, node, num_nodes, total)
Example #9
def get_input_files(oqparam, hazard=False):
    """
    :param oqparam: an OqParam instance
    :param hazard: if True, consider only the hazard files
    :returns: input path names in a specific order
    """
    fnames = set()  # files entering the checksum
    for key in oqparam.inputs:
        fname = oqparam.inputs[key]
        if hazard and key not in ('site_model', 'source_model_logic_tree',
                                  'gsim_logic_tree', 'source'):
            continue
        # collect .hdf5 tables for the GSIMs, if any
        elif key == 'gsim_logic_tree':
            gsim_lt = get_gsim_lt(oqparam)
            for gsims in gsim_lt.values.values():
                for gsim in gsims:
                    for k, v in gsim.kwargs.items():
                        if k.endswith(('_file', '_table')):
                            fnames.add(v)
            fnames.add(fname)
        elif key == 'source_model':  # UCERF
            f = oqparam.inputs['source_model']
            fnames.add(f)
            fname = nrml.read(f).sourceModel.UCERFSource['filename']
            fnames.add(os.path.join(os.path.dirname(f), fname))
        elif key == 'exposure':  # fname is a list
            for exp in asset.Exposure.read_headers(fname):
                fnames.update(exp.datafiles)
            fnames.update(fname)
        elif isinstance(fname, dict):
            fnames.update(fname.values())
        elif isinstance(fname, list):
            for f in fname:
                if f == oqparam.input_dir:
                    raise InvalidFile('%s: there is an empty path in %s' %
                                      (oqparam.inputs['job_ini'], key))
            fnames.update(fname)
        elif key == 'source_model_logic_tree':
            for smpaths in logictree.collect_info(fname).smpaths.values():
                fnames.update(smpaths)
            fnames.add(fname)
        else:
            fnames.add(fname)
    return sorted(fnames)
Example #10
def get_input_files(oqparam, hazard=False):
    """
    :param oqparam: an OqParam instance
    :param hazard: if True, consider only the hazard files
    :returns: input path names in a specific order
    """
    fnames = []  # files entering the checksum
    for key in oqparam.inputs:
        fname = oqparam.inputs[key]
        if hazard and key not in ('site_model', 'source_model_logic_tree',
                                  'gsim_logic_tree', 'source'):
            continue
        # collect .hdf5 tables for the GSIMs, if any
        elif key == 'gsim_logic_tree':
            gsim_lt = get_gsim_lt(oqparam)
            for gsims in gsim_lt.values.values():
                for gsim in gsims:
                    table = getattr(gsim, 'GMPE_TABLE', None)
                    if table:
                        fnames.append(table)
            fnames.append(fname)
        elif key == 'source_model':  # UCERF
            f = oqparam.inputs['source_model']
            fnames.append(f)
            fname = nrml.read(f).sourceModel.UCERFSource['filename']
            fnames.append(os.path.join(os.path.dirname(f), fname))
        elif key == 'exposure':  # fname is a list
            for exp in asset.Exposure.read_headers(fname):
                fnames.extend(exp.datafiles)
            fnames.extend(fname)
        elif isinstance(fname, dict):
            fnames.extend(fname.values())
        elif isinstance(fname, list):
            for f in fname:
                if f == oqparam.input_dir:
                    raise InvalidFile('%s: there is an empty path in %s' %
                                      (oqparam.inputs['job_ini'], key))
            fnames.extend(fname)
        elif key == 'source_model_logic_tree':
            for smpaths in logictree.collect_info(fname).smpaths.values():
                fnames.extend(smpaths)
            fnames.append(fname)
        else:
            fnames.append(fname)
    return sorted(fnames)
Example #11
def get_params(job_inis, **kw):
    """
    Parse one or more INI-style config files.

    :param job_inis:
        List of configuration files (or list containing a single zip archive)
    :param kw:
        Optionally override some parameters
    :returns:
        A dictionary of parameters
    """
    input_zip = None
    if len(job_inis) == 1 and job_inis[0].endswith('.zip'):
        input_zip = job_inis[0]
        job_inis = extract_from_zip(
            job_inis[0],
            ['job_hazard.ini', 'job_haz.ini', 'job.ini', 'job_risk.ini'])

    not_found = [ini for ini in job_inis if not os.path.exists(ini)]
    if not_found:  # something was not found
        raise IOError('File not found: %s' % not_found[0])

    cp = configparser.ConfigParser()
    cp.read(job_inis)

    # directory containing the config files we're parsing
    job_ini = os.path.abspath(job_inis[0])
    base_path = decode(os.path.dirname(job_ini))
    params = dict(base_path=base_path, inputs={'job_ini': job_ini})
    if input_zip:
        params['inputs']['input_zip'] = os.path.abspath(input_zip)

    for sect in cp.sections():
        _update(params, cp.items(sect), base_path)
    _update(params, kw.items(), base_path)  # override on demand

    # populate the 'source' list
    inputs = params['inputs']
    smlt = inputs.get('source_model_logic_tree')
    if smlt:
        inputs['source'] = logictree.collect_info(smlt).smpaths
    elif 'source_model' in inputs:
        inputs['source'] = [inputs['source_model']]
    return params
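A short sketch with a placeholder job.ini, also showing the keyword override:

params = get_params(['job.ini'], description='reduced run')  # kw overrides the job.ini value
print(params['inputs'].get('source'))  # paths collected from the logic tree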
Example #12
def reduce_source_model(smlt_file, source_ids, remove=True):
    """
    Extract sources from the composite source model
    """
    found = 0
    to_remove = []
    for paths in logictree.collect_info(smlt_file).smpaths.values():
        for path in paths:
            logging.info('Reading %s', path)
            root = nrml.read(path)
            model = Node('sourceModel', root[0].attrib)
            origmodel = root[0]
            if root['xmlns'] == 'http://openquake.org/xmlns/nrml/0.4':
                for src_node in origmodel:
                    if src_node['id'] in source_ids:
                        model.nodes.append(src_node)
            else:  # nrml/0.5
                for src_group in origmodel:
                    sg = copy.copy(src_group)
                    sg.nodes = []
                    weights = src_group.get('srcs_weights')
                    if weights:
                        assert len(weights) == len(src_group.nodes)
                    else:
                        weights = [1] * len(src_group.nodes)
                    src_group['srcs_weights'] = reduced_weights = []
                    for src_node, weight in zip(src_group, weights):
                        if src_node['id'] in source_ids:
                            found += 1
                            sg.nodes.append(src_node)
                            reduced_weights.append(weight)
                    if sg.nodes:
                        model.nodes.append(sg)
            shutil.copy(path, path + '.bak')
            if model:
                with open(path, 'wb') as f:
                    nrml.write([model], f, xmlns=root['xmlns'])
            elif remove:  # remove the files completely reduced
                to_remove.append(path)
    if found:
        for path in to_remove:
            os.remove(path)
Example #13
def zip_source_model(ssmLT, archive_zip='', log=logging.info):
    """
    Zip the source model files starting from the ssmLT.xml file
    """
    basedir = os.path.dirname(ssmLT)
    if os.path.basename(ssmLT) != 'ssmLT.xml':
        orig = ssmLT
        ssmLT = os.path.join(basedir, 'ssmLT.xml')
        with open(ssmLT, 'wb') as f:
            f.write(open(orig, 'rb').read())

    archive_zip = archive_zip or os.path.join(basedir, 'ssmLT.zip')
    if os.path.exists(archive_zip):
        sys.exit('%s exists already' % archive_zip)
    oq = mock.Mock(inputs={'source_model_logic_tree': ssmLT})
    checksum = readinput.get_checksum32(oq)
    checkfile = os.path.join(os.path.dirname(ssmLT), 'CHECKSUM.txt')
    with open(checkfile, 'w') as f:
        f.write(str(checksum))
    files = [os.path.abspath(ssmLT), os.path.abspath(checkfile)]
    for fs in logictree.collect_info(ssmLT).smpaths.values():
        files.extend(fs)
    general.zipfiles(files, archive_zip, log=log, cleanup=True)
    return archive_zip
Example #14
def renumber_sm(smlt_file):
    """
    Renumber the sources belonging to the same source model, even if split
    in multiple files, to avoid duplicated source IDs. NB: it changes the
    XML files in place, without making a backup, so be careful.
    """
    logging.basicConfig(level=logging.INFO)
    smpaths = logictree.collect_info(smlt_file).smpaths
    smap = parallel.Starmap(read_sm, [(path, ) for path in smpaths])
    smodel, srcs = {}, []
    for sm, fname, sources in smap:
        smodel[fname] = sm
        srcs.extend(sources)
    parallel.Starmap.shutdown()
    dic = general.groupby(srcs, operator.attrgetter('value'))
    n = 1
    for sources in dic.values():
        for src in sources:
            src.node['id'] = str(n)
        n += 1
    for fname, root in smodel.items():
        logging.info('Saving %s', fname)
        with open(fname, 'wb') as f:
            nrml.write(root, f, xmlns=root['xmlns'])
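Usage is a single call; as the docstring warns, the referenced XML files are rewritten in place (the path is a placeholder):

renumber_sm('ssmLT.xml')  # renumber source IDs across all referenced files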
Example #15
def reduce_source_model(smlt_file, source_ids, remove=True):
    """
    Extract sources from the composite source model.

    :param smlt_file: path to a source model logic tree file
    :param source_ids: dictionary source_id -> records (src_id, code)
    :param remove: if True, remove sm.xml files containing no sources
    :returns: the number of sources satisfying the filter vs the total
    """
    if isinstance(source_ids, dict):  # in oq reduce_sm
        def ok(src_node):
            code = tag2code[re.search(r'\}(\w\w)', src_node.tag).group(1)]
            arr = source_ids.get(src_node['id'])
            if arr is None:
                return False
            return (arr['code'] == code).any()
    else:  # list of source IDs, in extract_source
        def ok(src_node):
            return src_node['id'] in source_ids

    good, total = 0, 0
    to_remove = set()
    for paths in logictree.collect_info(smlt_file).smpaths.values():
        for path in paths:
            logging.info('Reading %s', path)
            root = nrml.read(path)
            model = Node('sourceModel', root[0].attrib)
            origmodel = root[0]
            if root['xmlns'] == 'http://openquake.org/xmlns/nrml/0.4':
                for src_node in origmodel:
                    total += 1
                    if ok(src_node):
                        good += 1
                        model.nodes.append(src_node)
            else:  # nrml/0.5
                for src_group in origmodel:
                    sg = copy.copy(src_group)
                    sg.nodes = []
                    weights = src_group.get('srcs_weights')
                    if weights:
                        assert len(weights) == len(src_group.nodes)
                    else:
                        weights = [1] * len(src_group.nodes)
                    src_group['srcs_weights'] = reduced_weights = []
                    for src_node, weight in zip(src_group, weights):
                        total += 1
                        if ok(src_node):
                            good += 1
                            sg.nodes.append(src_node)
                            reduced_weights.append(weight)
                    if sg.nodes:
                        model.nodes.append(sg)
            shutil.copy(path, path + '.bak')
            if model:
                with open(path, 'wb') as f:
                    nrml.write([model], f, xmlns=root['xmlns'])
            elif remove:  # remove the files completely reduced
                to_remove.add(path)
    if good:
        for path in to_remove:
            os.remove(path)
    return good, total
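This variant also accepts a plain list (or set) of source IDs instead of the records dictionary; a sketch with placeholder values:

# keep two sources, leaving fully reduced files on disk (remove=False)
good, total = reduce_source_model('ssmLT.xml', ['src_1', 'src_2'], remove=False)
print('%d/%d sources kept' % (good, total))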
Example #16
def main(what, report=False):
    """
    Give information about the passed keyword or filename
    """
    if os.environ.get('OQ_DISTRIBUTE') not in ('no', 'processpool'):
        os.environ['OQ_DISTRIBUTE'] = 'processpool'
    if what == 'calculators':
        for calc in sorted(base.calculators):
            print(calc)
    elif what == 'gsims':
        for gs in gsim.get_available_gsims():
            print(gs)
    elif what == 'imts':
        for im in gen_subclasses(imt.IMT):
            print(im.__name__)
    elif what == 'views':
        for name in sorted(view):
            print(name)
    elif what == 'exports':
        dic = groupby(export, operator.itemgetter(0),
                      lambda group: [r[1] for r in group])
        n = 0
        for exporter, formats in dic.items():
            print(exporter, formats)
            n += len(formats)
        print('There are %d exporters defined.' % n)
    elif what == 'extracts':
        for key in extract:
            func = extract[key]
            if hasattr(func, '__wrapped__'):
                fm = FunctionMaker(func.__wrapped__)
            elif hasattr(func, 'func'):  # for partial objects
                fm = FunctionMaker(func.func)
            else:
                fm = FunctionMaker(func)
            print('%s(%s)%s' % (fm.name, fm.signature, fm.doc))
    elif what == 'parameters':
        params = []
        for val in vars(OqParam).values():
            if hasattr(val, 'name'):
                params.append(val)
        params.sort(key=lambda x: x.name)
        for param in params:
            print(param.name)
    elif what == 'mfds':
        for cls in gen_subclasses(BaseMFD):
            print(cls.__name__)
    elif what == 'sources':
        for cls in gen_subclasses(BaseSeismicSource):
            print(cls.__name__)
    elif os.path.isdir(what) and report:
        with Monitor('info', measuremem=True) as mon:
            with mock.patch.object(logging.root, 'info'):  # reduce logging
                do_build_reports(what)
        print(mon)
    elif what.endswith('.xml'):
        node = nrml.read(what)
        if node[0].tag.endswith('sourceModel'):
            print(source_model_info([node]))
        elif node[0].tag.endswith('logicTree'):
            sm_nodes = []
            for smpath in logictree.collect_info(what).smpaths:
                sm_nodes.append(nrml.read(smpath))
            print(source_model_info(sm_nodes))
        else:
            print(node.to_str())
    elif what.endswith(('.ini', '.zip')):
        with Monitor('info', measuremem=True) as mon:
            if report:
                print('Generated', reportwriter.build_report(what))
            else:
                print(readinput.get_oqparam(what).json())
        if mon.duration > 1:
            print(mon)
    elif what:
        print("No info for '%s'" % what)
Example #17
def main(what, report=False):
    """
    Give information about the passed keyword or filename
    """
    if os.environ.get('OQ_DISTRIBUTE') not in ('no', 'processpool'):
        os.environ['OQ_DISTRIBUTE'] = 'processpool'
    if what == 'calculators':
        for calc in sorted(base.calculators):
            print(calc)
    elif what == 'gsims':
        for gs in gsim.get_available_gsims():
            print(gs)
    elif what == 'portable_gsims':
        for gs in gsim.get_portable_gsims():
            print(gs)
    elif what == 'imts':
        for im in vars(imt).values():
            if inspect.isfunction(im) and is_upper(im):
                print(im.__name__)
    elif what == 'views':
        for name in sorted(view):
            print(name)
    elif what == 'exports':
        dic = groupby(export, operator.itemgetter(0),
                      lambda group: [r[1] for r in group])
        items = [(DISPLAY_NAME.get(exporter, '?'), exporter, formats)
                 for exporter, formats in dic.items()]
        n = 0
        for dispname, exporter, formats in sorted(items):
            print(dispname, '"%s"' % exporter, formats)
            n += len(formats)
        print('There are %d exporters defined.' % n)
    elif what == 'extracts':
        for key in extract:
            func = extract[key]
            if hasattr(func, '__wrapped__'):
                fm = FunctionMaker(func.__wrapped__)
            elif hasattr(func, 'func'):  # for partial objects
                fm = FunctionMaker(func.func)
            else:
                fm = FunctionMaker(func)
            print('%s(%s)%s' % (fm.name, fm.signature, fm.doc))
    elif what == 'parameters':
        docs = OqParam.docs()
        names = set()
        for val in vars(OqParam).values():
            if hasattr(val, 'name'):
                names.add(val.name)
        params = sorted(names)
        for param in params:
            print(param)
            print(docs[param])
    elif what == 'mfds':
        for cls in gen_subclasses(BaseMFD):
            print(cls.__name__)
    elif what == 'venv':
        print(sys.prefix)
    elif what == 'sources':
        for cls in gen_subclasses(BaseSeismicSource):
            print(cls.__name__)
    elif what == 'consequences':
        known = scientific.KNOWN_CONSEQUENCES
        print('The following %d consequences are implemented:' % len(known))
        for cons in known:
            print(cons)
    elif os.path.isdir(what) and report:
        with Monitor('info', measuremem=True) as mon:
            with mock.patch.object(logging.root, 'info'):  # reduce logging
                do_build_reports(what)
        print(mon)
    elif what.endswith('.xml'):
        node = nrml.read(what)
        if node[0].tag.endswith('sourceModel'):
            print(source_model_info([node]))
        elif node[0].tag.endswith('logicTree'):
            bset = node[0][0]
            if bset.tag.endswith("logicTreeBranchingLevel"):
                bset = bset[0]
            if bset.attrib['uncertaintyType'] == 'sourceModel':
                sm_nodes = []
                for smpath in logictree.collect_info(what).smpaths:
                    sm_nodes.append(nrml.read(smpath))
                print(source_model_info(sm_nodes))
            elif bset.attrib['uncertaintyType'] == 'gmpeModel':
                print(logictree.GsimLogicTree(what))
        else:
            print(node.to_str())
    elif what.endswith(('.ini', '.zip')):
        with Monitor('info', measuremem=True) as mon:
            if report:
                print('Generated', reportwriter.build_report(what))
            else:
                print(readinput.get_oqparam(what).json())
        if mon.duration > 1:
            print(mon)
    elif what:
        print("No info for '%s'" % what)
Example #18
def info(calculators, gsims, views, exports, extracts, parameters,
         report, input_file=''):
    """
    Give information. You can pass the name of an available calculator,
    a job.ini file, or a zip archive with the input files.
    """
    if calculators:
        for calc in sorted(base.calculators):
            print(calc)
    if gsims:
        for gs in gsim.get_available_gsims():
            print(gs)
    if views:
        for name in sorted(view):
            print(name)
    if exports:
        dic = groupby(export, operator.itemgetter(0),
                      lambda group: [r[1] for r in group])
        n = 0
        for exporter, formats in dic.items():
            print(exporter, formats)
            n += len(formats)
        print('There are %d exporters defined.' % n)
    if extracts:
        for key in extract:
            func = extract[key]
            if hasattr(func, '__wrapped__'):
                fm = FunctionMaker(func.__wrapped__)
            else:
                fm = FunctionMaker(func)
            print('%s(%s)%s' % (fm.name, fm.signature, fm.doc))
    if parameters:
        params = []
        for val in vars(OqParam).values():
            if hasattr(val, 'name'):
                params.append(val)
        params.sort(key=lambda x: x.name)
        for param in params:
            print(param.name)
    if os.path.isdir(input_file) and report:
        with Monitor('info', measuremem=True) as mon:
            with mock.patch.object(logging.root, 'info'):  # reduce logging
                do_build_reports(input_file)
        print(mon)
    elif input_file.endswith('.xml'):
        node = nrml.read(input_file)
        if node[0].tag.endswith('sourceModel'):
            if node['xmlns'].endswith('nrml/0.4'):
                raise InvalidFile(
                    '%s is in NRML 0.4 format, please run the following '
                    'command:\noq upgrade_nrml %s' % (
                        input_file, os.path.dirname(input_file) or '.'))
            print(source_model_info([node[0]]))
        elif node[0].tag.endswith('logicTree'):
            nodes = [nrml.read(sm_path)[0]
                     for sm_path in logictree.collect_info(input_file).smpaths]
            print(source_model_info(nodes))
        else:
            print(node.to_str())
    elif input_file.endswith(('.ini', '.zip')):
        with Monitor('info', measuremem=True) as mon:
            if report:
                print('Generated', reportwriter.build_report(input_file))
            else:
                print_csm_info(input_file)
        if mon.duration > 1:
            print(mon)
    elif input_file:
        print("No info for '%s'" % input_file)
Example #19
def info(calculators, gsims, views, exports, extracts, report, input_file=''):
    """
    Give information. You can pass the name of an available calculator,
    a job.ini file, or a zip archive with the input files.
    """
    if calculators:
        for calc in sorted(base.calculators):
            print(calc)
    if gsims:
        for gs in gsim.get_available_gsims():
            print(gs)
    if views:
        for name in sorted(view):
            print(name)
    if exports:
        dic = groupby(export, operator.itemgetter(0),
                      lambda group: [r[1] for r in group])
        n = 0
        for exporter, formats in dic.items():
            print(exporter, formats)
            n += len(formats)
        print('There are %d exporters defined.' % n)
    if extracts:
        for key in extract:
            func = extract[key]
            if hasattr(func, '__wrapped__'):
                fm = FunctionMaker(func.__wrapped__)
            else:
                fm = FunctionMaker(func)
            print('%s(%s)%s' % (fm.name, fm.signature, fm.doc))
    if os.path.isdir(input_file) and report:
        with Monitor('info', measuremem=True) as mon:
            with mock.patch.object(logging.root, 'info'):  # reduce logging
                do_build_reports(input_file)
        print(mon)
    elif input_file.endswith('.xml'):
        node = nrml.read(input_file)
        if node[0].tag.endswith('sourceModel'):
            if node['xmlns'].endswith('nrml/0.4'):
                raise InvalidFile(
                    '%s is in NRML 0.4 format, please run the following '
                    'command:\noq upgrade_nrml %s' %
                    (input_file, os.path.dirname(input_file) or '.'))
            print(source_model_info([node[0]]))
        elif node[0].tag.endswith('logicTree'):
            nodes = [
                nrml.read(sm_path)[0]
                for sm_path in logictree.collect_info(input_file).smpaths
            ]
            print(source_model_info(nodes))
        else:
            print(node.to_str())
    elif input_file.endswith(('.ini', '.zip')):
        with Monitor('info', measuremem=True) as mon:
            if report:
                print('Generated', reportwriter.build_report(input_file))
            else:
                print_csm_info(input_file)
        if mon.duration > 1:
            print(mon)
    elif input_file:
        print("No info for '%s'" % input_file)