def test_get_available_gsims(self):
    """
    Mock os.listdir so only a known set of module files is visible and
    replace importlib.import_module with fake_import; the discovered
    GSIMs must then be exactly the two classes from FakeModule, sorted
    by name.
    """
    fake_files = [
        '__init__.py', 'base.py', 'atkinson_boore_2006.py',
        'zhao_2006.py', 'README.txt']
    with mock.patch('os.listdir') as mock_listdir:
        mock_listdir.return_value = fake_files  # returns some file names
        with mock.patch('importlib.import_module', fake_import):
            discovered = get_available_gsims()
            assert_equal(list(discovered.keys()),
                         ['AtkinsonBoore2006', 'BooreAtkinson2008'])
            assert_equal(list(discovered.values()),
                         [FakeModule.AtkinsonBoore2006,
                          FakeModule.BooreAtkinson2008])
def test_compute_gmf(self):
    """
    Compute the ground motion fields generated by two fake ruptures on
    five sites and compare them with precomputed expected values.
    """
    hc = mock.Mock()
    hc.ground_motion_correlation_model = None
    hc.truncation_level = None
    hc.maximum_distance = 200.
    gsim = get_available_gsims()['AkkarBommer2010']()
    site_coll = make_site_coll(-78, 15.5, n=5)
    params = dict(truncation_level=3, correl_model=None,
                  maximum_distance=200)
    trt = 'Subduction Interface'
    rupture_ids = range(2)
    ruptures = [FakeRupture(i, trt) for i in rupture_ids]
    rupture_seeds = rupture_ids
    gmv_dict, rup_dict = core._compute_gmf(
        params, PGA(), {trt: gsim}, site_coll, ruptures, rupture_seeds)
    expected_rups = {
        0: rupture_ids,
        1: rupture_ids,
        2: rupture_ids,
        3: rupture_ids,
        4: rupture_ids,
    }
    expected_gmvs = {
        0: [0.122149047040728, 0.0813899249039753],
        1: [0.0541662667863476, 0.02136369236082],
        2: [0.0772246502768338, 0.0226182956091826],
        3: [0.166062666449449, 0.0164127269047494],
        4: [0.133588538354143, 0.0529987707352876]
    }
    numpy.testing.assert_equal(rup_dict, expected_rups)
    # BUG FIX: the old loop iterated expected_gmvs and compared it with
    # itself, so the computed gmv_dict was never actually checked.
    # Also use items() instead of the Python-2-only iteritems().
    for i, gmvs in expected_gmvs.items():
        numpy.testing.assert_allclose(gmv_dict[i], gmvs)
def info(calculators, gsims, views, exports, report, input_file=''):
    """
    Give information. You can pass the name of an available calculator,
    a job.ini file, or a zip archive with the input files.
    """
    logging.basicConfig(level=logging.INFO)
    # the boolean flags are independent: several listings may be printed
    # in a single invocation
    if calculators:
        for calc in sorted(base.calculators):
            print(calc)
    if gsims:
        for gs in gsim.get_available_gsims():
            print(gs)
    if views:
        for name in sorted(datastore.view):
            print(name)
    if exports:
        # group the (exporter, format) pairs by exporter key
        dic = groupby(export, operator.itemgetter(0),
                      lambda group: [r[1] for r in group])
        n = 0
        for exporter, formats in dic.items():
            print(exporter, formats)
            n += len(formats)
        print('There are %d exporters defined.' % n)
    if input_file.endswith('.xml'):
        # dump the parsed NRML file
        print(nrml.read(input_file).to_str())
    elif input_file.endswith(('.ini', '.zip')):
        with Monitor('info', measuremem=True) as mon:
            if report:
                print('Generated', reportwriter.build_report(input_file))
            else:
                print_csm_info(input_file)
        # only report timing/memory for runs longer than one second
        if mon.duration > 1:
            print(mon)
    elif input_file:
        print("No info for '%s'" % input_file)
def check_gmpe_attributes(gmpe):
    """
    Details
    -------
    Checks the attributes for gmpe and prints them: tectonic region,
    standard deviations, intensity measures and component, SA period
    range (when available) and required distance/rupture/site params.

    Parameters
    ----------
    gmpe : str
        gmpe name for which attributes going to be checked

    Returns
    -------
    None.
    """
    from openquake.hazardlib import gsim
    bgmpe = gsim.get_available_gsims()[gmpe]()
    print('GMPE name: %s' % gmpe)
    print('Supported tectonic region: %s'
          % bgmpe.DEFINED_FOR_TECTONIC_REGION_TYPE)
    # the attributes are already iterables of strings: join them directly
    print('Supported standard deviation: %s'
          % ', '.join(str(std)
                      for std in bgmpe.DEFINED_FOR_STANDARD_DEVIATION_TYPES))
    print('Supported intensity measure: %s' % ', '.join(
        imt.__name__ for imt in bgmpe.DEFINED_FOR_INTENSITY_MEASURE_TYPES))
    print('Supported intensity measure component: %s'
          % bgmpe.DEFINED_FOR_INTENSITY_MEASURE_COMPONENT)
    # Not every GMPE defines a COEFFS table with SA coefficients; skip
    # the period range in that case.  The former bare `except:` also hid
    # unrelated errors (even KeyboardInterrupt) -- catch only the cases
    # that can legitimately occur here.
    try:
        sa_keys = list(bgmpe.COEFFS.sa_coeffs.keys())
        print('Supported SA period range: %s'
              % ' - '.join([str(sa_keys[0].period),
                            str(sa_keys[-1].period)]))
    except (AttributeError, IndexError, KeyError):
        pass
    print('Required distance parameters: %s'
          % ', '.join(dist for dist in bgmpe.REQUIRES_DISTANCES))
    print('Required rupture parameters: %s'
          % ', '.join(rup for rup in bgmpe.REQUIRES_RUPTURE_PARAMETERS))
    print('Required site parameters: %s'
          % ', '.join(site for site in bgmpe.REQUIRES_SITES_PARAMETERS))
def _info(name, filtersources, weightsources):
    """
    Print information about the given name: a calculator docstring, the
    list of available GSIMs, an XML file dump, or a summary of a job.ini
    / zip archive.  `filtersources` and `weightsources` control how much
    source-model processing is performed.
    """
    if name in base.calculators:
        print(textwrap.dedent(base.calculators[name].__doc__.strip()))
    elif name == 'gsims':
        for gs in gsim.get_available_gsims():
            print(gs)
    elif name.endswith('.xml'):
        print(nrml.read(name).to_str())
    elif name.endswith(('.ini', '.zip')):
        oqparam = readinput.get_oqparam(name)
        if 'exposure' in oqparam.inputs:
            expo = readinput.get_exposure(oqparam)
            sitecol, assets_by_site = readinput.get_sitecol_assets(
                oqparam, expo)
        elif filtersources or weightsources:
            # the site collection is only needed to filter/weight sources
            sitecol = readinput.get_site_collection(oqparam)
        else:
            sitecol = None
        if 'source_model_logic_tree' in oqparam.inputs:
            print('Reading the source model...')
            # pick the source processor matching the requested detail level
            if weightsources:
                sp = source.SourceFilterWeighter
            elif filtersources:
                sp = source.SourceFilter
            else:
                sp = source.BaseSourceProcessor  # do nothing
            csm = readinput.get_composite_source_model(oqparam, sitecol, sp)
            assoc = csm.get_rlzs_assoc()
            dstore = datastore.Fake(vars(oqparam), rlzs_assoc=assoc,
                                    composite_source_model=csm,
                                    sitecol=sitecol)
            _print_info(dstore, filtersources, weightsources)
    else:
        print("No info for '%s'" % name)
def gsims(cls):
    """Return a fresh dict mapping each gsim_name (string) to its
    gsim_class (class object), covering all Ground Shaking Intensity
    Models defined in OpenQuake.  The dict is sorted by gsim_name."""
    # get_available_gsims already builds a brand-new dict, so it can be
    # handed back directly
    available = get_available_gsims()
    return available
def info(calculators, gsims, views, exports, extracts, report, input_file=''):
    """
    Give information. You can pass the name of an available calculator,
    a job.ini file, or a zip archive with the input files.
    """
    logging.basicConfig(level=logging.INFO)
    # the boolean flags are independent: several listings may be printed
    if calculators:
        for calc in sorted(base.calculators):
            print(calc)
    if gsims:
        for gs in gsim.get_available_gsims():
            print(gs)
    if views:
        for name in sorted(view):
            print(name)
    if exports:
        # group the (exporter, format) pairs by exporter key
        dic = groupby(export, operator.itemgetter(0),
                      lambda group: [r[1] for r in group])
        n = 0
        for exporter, formats in dic.items():
            print(exporter, formats)
            n += len(formats)
        print('There are %d exporters defined.' % n)
    if extracts:
        # print the signature of each extract function, unwrapping
        # decorated callables first
        for key in extract:
            func = extract[key]
            if hasattr(func, '__wrapped__'):
                fm = FunctionMaker(func.__wrapped__)
            else:
                fm = FunctionMaker(func)
            print('%s(%s)%s' % (fm.name, fm.signature, fm.doc))
    if os.path.isdir(input_file) and report:
        with Monitor('info', measuremem=True) as mon:
            with mock.patch.object(logging.root, 'info'):  # reduce logging
                do_build_reports(input_file)
        print(mon)
    elif input_file.endswith('.xml'):
        node = nrml.read(input_file)
        if node[0].tag.endswith('sourceModel'):
            # source models in the legacy NRML 0.4 format must be upgraded
            if node['xmlns'].endswith('nrml/0.4'):
                raise InvalidFile(
                    '%s is in NRML 0.4 format, please run the following '
                    'command:\noq upgrade_nrml %s' % (
                        input_file, os.path.dirname(input_file) or '.'))
            print(source_model_info(node[0]))
        else:
            print(node.to_str())
    elif input_file.endswith(('.ini', '.zip')):
        with Monitor('info', measuremem=True) as mon:
            if report:
                print('Generated', reportwriter.build_report(input_file))
            else:
                print_csm_info(input_file)
        # only report timing/memory for runs longer than one second
        if mon.duration > 1:
            print(mon)
    elif input_file:
        print("No info for '%s'" % input_file)
def test_compute_gmf(self):
    """
    Compute the ground motion field produced by one rupture on five
    sites, check it against precomputed values, then build the hazard
    curves from it.
    """
    hc = mock.Mock()
    hc.ground_motion_correlation_model = None
    hc.truncation_level = None
    hc.maximum_distance = 200.
    trt = 'Subduction Interface'
    gsim = get_available_gsims()['AkkarBommer2010']()
    num_sites = 5
    site_coll = make_site_coll(-78, 15.5, num_sites)
    rup_id, rup_seed = 42, 44
    rup = FakeRupture(rup_id, trt)
    pga = PGA()
    rlz = mock.Mock()
    rlz.id = 1
    calc = core.GmfCalculator(
        [pga], [gsim], trt_model_id=1, truncation_level=3)
    calc.calc_gmfs(site_coll, rup, [(rup.id, rup_seed)])
    expected_rups = {
        ('AkkarBommer2010', 'PGA', 0): [rup_id],
        ('AkkarBommer2010', 'PGA', 1): [rup_id],
        ('AkkarBommer2010', 'PGA', 2): [rup_id],
        ('AkkarBommer2010', 'PGA', 3): [rup_id],
        ('AkkarBommer2010', 'PGA', 4): [rup_id],
    }
    expected_gmvs = {
        ('AkkarBommer2010', 'PGA', 0): [0.1027847118266612],
        ('AkkarBommer2010', 'PGA', 1): [0.02726361912605336],
        ('AkkarBommer2010', 'PGA', 2): [0.0862595971325641],
        ('AkkarBommer2010', 'PGA', 3): [0.04727148908077005],
        ('AkkarBommer2010', 'PGA', 4): [0.04750575818347277],
    }
    numpy.testing.assert_equal(calc.ruptures_per_site, expected_rups)
    # BUG FIX: the old loop iterated expected_gmvs and compared it with
    # itself, so the computed values were never checked; also items()
    # replaces the Python-2-only iteritems().
    # NOTE(review): assumes the computed GMVs live in calc.gmvs_per_site,
    # mirroring calc.ruptures_per_site above -- confirm on GmfCalculator.
    for key, gmvs in expected_gmvs.items():
        numpy.testing.assert_allclose(calc.gmvs_per_site[key], gmvs)
    # 5 curves (one per each site) for 3 levels, 1 IMT
    [(gname, [curves])] = calc.to_haz_curves(
        site_coll.sids, dict(PGA=[0.03, 0.04, 0.05]),
        invest_time=50., num_ses=10)
    self.assertEqual(gname, 'AkkarBommer2010')
    numpy.testing.assert_array_almost_equal(
        curves,
        [[0.09516258, 0.09516258, 0.09516258],  # curve site1
         [0.00000000, 0.00000000, 0.00000000],  # curve site2
         [0.09516258, 0.09516258, 0.09516258],  # curve site3
         [0.09516258, 0.09516258, 0.00000000],  # curve site4
         [0.09516258, 0.09516258, 0.00000000],  # curve site5
         ])
def info(calculators, gsims, views, exports, report, input_file=''):
    """
    Give information. You can pass the name of an available calculator,
    a job.ini file, or a zip archive with the input files.
    """
    logging.basicConfig(level=logging.INFO)
    # the boolean flags are independent: several listings may be printed
    if calculators:
        for calc in sorted(base.calculators):
            print(calc)
    if gsims:
        for gs in gsim.get_available_gsims():
            print(gs)
    if views:
        for name in sorted(view):
            print(name)
    if exports:
        # group the (exporter, format) pairs by exporter key
        dic = groupby(export, operator.itemgetter(0),
                      lambda group: [r[1] for r in group])
        n = 0
        for exporter, formats in dic.items():
            print(exporter, formats)
            n += len(formats)
        print('There are %d exporters defined.' % n)
    if os.path.isdir(input_file) and report:
        with Monitor('info', measuremem=True) as mon:
            with mock.patch.object(logging.root, 'info'):  # reduce logging
                do_build_reports(input_file)
        print(mon)
    elif input_file.endswith('.xml'):
        node = nrml.read(input_file)
        if node[0].tag.endswith('sourceModel'):
            # only NRML 0.5 source models are accepted here
            assert node['xmlns'].endswith('nrml/0.5'), node['xmlns']
            print(source_model_info(node[0]))
        else:
            print(node.to_str())
    elif input_file.endswith(('.ini', '.zip')):
        with Monitor('info', measuremem=True) as mon:
            if report:
                print('Generated', reportwriter.build_report(input_file))
            else:
                print_csm_info(input_file)
        # only report timing/memory for runs longer than one second
        if mon.duration > 1:
            print(mon)
    elif input_file:
        print("No info for '%s'" % input_file)
def _info(name, filtersources, weightsources):
    """
    Print information about the given name: a calculator docstring, the
    list of available GSIMs, an XML file dump, or a summary of a job.ini
    / zip archive (including the exposure, if any).
    """
    if name in base.calculators:
        print(textwrap.dedent(base.calculators[name].__doc__.strip()))
    elif name == 'gsims':
        for gs in gsim.get_available_gsims():
            print(gs)
    elif name.endswith('.xml'):
        print(nrml.read(name).to_str())
    elif name.endswith(('.ini', '.zip')):
        oqparam = readinput.get_oqparam(name)
        if 'exposure' in oqparam.inputs:
            expo = readinput.get_exposure(oqparam)
            sitecol, assets_by_site = readinput.get_sitecol_assets(
                oqparam, expo)
        elif filtersources or weightsources:
            # sites are only needed to filter/weight the sources
            sitecol, assets_by_site = readinput.get_site_collection(
                oqparam), []
        else:
            sitecol, assets_by_site = None, []
        if 'source_model_logic_tree' in oqparam.inputs:
            print('Reading the source model...')
            # pick the source processor matching the requested detail level
            if weightsources:
                sp = source.SourceFilterWeighter
            elif filtersources:
                sp = source.SourceFilter
            else:
                sp = source.BaseSourceProcessor  # do nothing
            csm = readinput.get_composite_source_model(oqparam, sitecol, sp)
            assoc = csm.get_rlzs_assoc()
            dstore = datastore.Fake(vars(oqparam), rlzs_assoc=assoc,
                                    composite_source_model=csm,
                                    sitecol=sitecol)
            _print_info(dstore, filtersources, weightsources)
        if len(assets_by_site):
            # summarize the exposure grouped by taxonomy
            assetcol = riskinput.build_asset_collection(assets_by_site)
            dic = groupby(assetcol, operator.attrgetter('taxonomy'))
            for taxo, num in dic.items():
                print('taxonomy #%d, %d assets' % (taxo, num))
            print('total assets = %d' % len(assetcol))
    else:
        print("No info for '%s'" % name)
def _info(name, filtersources, weightsources):
    """
    Print information about the given name: a calculator docstring, the
    list of available GSIMs, an XML file dump, or a summary of a job.ini
    / zip archive (including the exposure, if any).
    """
    if name in base.calculators:
        print(textwrap.dedent(base.calculators[name].__doc__.strip()))
    elif name == 'gsims':
        for gs in gsim.get_available_gsims():
            print(gs)
    elif name.endswith('.xml'):
        print(nrml.read(name).to_str())
    elif name.endswith(('.ini', '.zip')):
        oqparam = readinput.get_oqparam(name)
        if 'exposure' in oqparam.inputs:
            expo = readinput.get_exposure(oqparam)
            sitecol, assets_by_site = readinput.get_sitecol_assets(
                oqparam, expo)
        elif filtersources or weightsources:
            # sites are only needed to filter/weight the sources
            sitecol, assets_by_site = readinput.get_site_collection(
                oqparam), []
        else:
            sitecol, assets_by_site = None, []
        if 'source_model_logic_tree' in oqparam.inputs:
            print('Reading the source model...')
            # pick the source processor matching the requested detail level
            if weightsources:
                sp = source.SourceFilterWeighter
            elif filtersources:
                sp = source.SourceFilter
            else:
                sp = source.BaseSourceProcessor  # do nothing
            csm = readinput.get_composite_source_model(oqparam, sitecol, sp)
            assoc = csm.get_rlzs_assoc()
            _print_info(
                dict(rlzs_assoc=assoc, oqparam=oqparam,
                     composite_source_model=csm, sitecol=sitecol),
                filtersources, weightsources)
        if len(assets_by_site):
            # summarize the exposure grouped by taxonomy
            assetcol = riskinput.build_asset_collection(assets_by_site)
            dic = groupby(assetcol, operator.attrgetter('taxonomy'))
            for taxo, num in dic.items():
                print('taxonomy #%d, %d assets' % (taxo, num))
            print('total assets = %d' % len(assetcol))
    else:
        print("No info for '%s'" % name)
def test_compute_gmf(self):
    """
    Compute the ground motion field produced by one rupture on five
    sites and compare it with precomputed expected values.
    """
    hc = mock.Mock()
    hc.ground_motion_correlation_model = None
    hc.truncation_level = None
    hc.maximum_distance = 200.
    trt = 'Subduction Interface'
    gsim = get_available_gsims()['AkkarBommer2010']()
    num_sites = 5
    site_coll = make_site_coll(-78, 15.5, num_sites)
    params = dict(truncation_level=3, correl_model=None,
                  maximum_distance=200, num_sites=num_sites)
    rup_id, rup_seed = 42, 44
    rup = FakeRupture(rup_id, trt)
    pga = PGA()
    rlz = mock.Mock()
    rlz.id = 1
    coll = core.GmfCalculator(
        params, [pga], [gsim], trt_model_id=1, task_no=0)
    rdata = core.RuptureData(site_coll, rup, [(rup.id, rup_seed)])
    coll.calc_gmfs([rdata])
    expected_rups = {
        ('AkkarBommer2010', pga, 0): [rup_id],
        ('AkkarBommer2010', pga, 1): [rup_id],
        ('AkkarBommer2010', pga, 2): [rup_id],
        ('AkkarBommer2010', pga, 3): [rup_id],
        ('AkkarBommer2010', pga, 4): [rup_id],
    }
    expected_gmvs = {
        ('AkkarBommer2010', pga, 0): [0.1027847118266612],
        ('AkkarBommer2010', pga, 1): [0.02726361912605336],
        ('AkkarBommer2010', pga, 2): [0.0862595971325641],
        ('AkkarBommer2010', pga, 3): [0.04727148908077005],
        ('AkkarBommer2010', pga, 4): [0.04750575818347277],
    }
    numpy.testing.assert_equal(coll.ruptures_per_site, expected_rups)
    # BUG FIX: the old loop iterated expected_gmvs and compared it with
    # itself, so the computed values were never checked; also items()
    # replaces the Python-2-only iteritems().
    # NOTE(review): assumes the computed GMVs live in coll.gmvs_per_site,
    # mirroring coll.ruptures_per_site above -- confirm on GmfCalculator.
    for key, gmvs in expected_gmvs.items():
        numpy.testing.assert_allclose(coll.gmvs_per_site[key], gmvs)
def info(name, report=False):
    """
    Give information. You can pass the name of an available calculator,
    a job.ini file, or a zip archive with the input files.
    """
    logging.basicConfig(level=logging.INFO)
    # dispatch on the kind of name, one guard per case
    if name in base.calculators:
        # show the docstring of the requested calculator
        print(textwrap.dedent(base.calculators[name].__doc__.strip()))
        return
    if name == 'gsims':
        for gs in gsim.get_available_gsims():
            print(gs)
        return
    if name.endswith('.xml'):
        print(nrml.read(name).to_str())
        return
    if name.endswith(('.ini', '.zip')):
        with Monitor('info', measuremem=True) as mon:
            if report:
                print('Generated', reportwriter.build_report(name))
            else:
                print_csm_info(name)
        # show timing/memory info only for runs longer than a second
        if mon.duration > 1:
            print(mon)
        return
    print("No info for '%s'" % name)
def get_available_gmpes():
    """
    Details
    -------
    Retrieves available ground motion prediction equations (gmpe) in
    OpenQuake.

    Parameters
    ----------
    None.

    Returns
    -------
    gmpes : dict
        Dictionary which contains available gmpes in openquake.
    """
    from openquake.hazardlib import gsim
    # get_available_gsims() already yields a name -> class mapping;
    # copy it into a plain dict instead of rebuilding it item by item
    return dict(gsim.get_available_gsims())
def _info(name, filtersources, weightsources):
    """
    Print information about the given name: a calculator docstring, the
    list of available GSIMs, an XML file dump, or a summary of a job.ini
    / zip archive (including the exposure, if any).
    """
    if name in base.calculators:
        print(textwrap.dedent(base.calculators[name].__doc__.strip()))
    elif name == "gsims":
        for gs in gsim.get_available_gsims():
            print(gs)
    elif name.endswith(".xml"):
        print(nrml.read(name).to_str())
    elif name.endswith((".ini", ".zip")):
        oqparam = readinput.get_oqparam(name)
        if "exposure" in oqparam.inputs:
            expo = readinput.get_exposure(oqparam)
            sitecol, assets_by_site = readinput.get_sitecol_assets(oqparam, expo)
        elif filtersources or weightsources:
            # sites are only needed to filter/weight the sources
            sitecol, assets_by_site = readinput.get_site_collection(oqparam), []
        else:
            sitecol, assets_by_site = None, []
        if "source_model_logic_tree" in oqparam.inputs:
            print("Reading the source model...")
            # pick the source processor matching the requested detail level
            if weightsources:
                sp = source.SourceFilterWeighter
            elif filtersources:
                sp = source.SourceFilter
            else:
                sp = source.BaseSourceProcessor  # do nothing
            csm = readinput.get_composite_source_model(oqparam, sitecol, sp)
            assoc = csm.get_rlzs_assoc()
            dstore = datastore.Fake(vars(oqparam), rlzs_assoc=assoc,
                                    composite_source_model=csm,
                                    sitecol=sitecol)
            _print_info(dstore, filtersources, weightsources)
        if len(assets_by_site):
            # summarize the exposure grouped by taxonomy
            assetcol = riskinput.build_asset_collection(assets_by_site)
            dic = groupby(assetcol, operator.attrgetter("taxonomy"))
            for taxo, num in dic.items():
                print("taxonomy #%d, %d assets" % (taxo, num))
            print("total assets = %d" % len(assetcol))
    else:
        print("No info for '%s'" % name)
def _info(name, filtersources, weightsources):
    """
    Print information about the given name: a calculator docstring, the
    list of available GSIMs, an XML file dump, or a summary of a job.ini
    / zip archive (including the exposure size, if any).
    """
    if name in base.calculators:
        print(textwrap.dedent(base.calculators[name].__doc__.strip()))
    elif name == 'gsims':
        for gs in gsim.get_available_gsims():
            print(gs)
    elif name.endswith('.xml'):
        print(nrml.read(name).to_str())
    elif name.endswith(('.ini', '.zip')):
        oqparam = readinput.get_oqparam(name)
        if 'exposure' in oqparam.inputs:
            expo = readinput.get_exposure(oqparam)
            sitecol, assets_by_site = readinput.get_sitecol_assets(
                oqparam, expo)
        elif filtersources or weightsources:
            # sites are only needed to filter/weight the sources
            sitecol, assets_by_site = readinput.get_site_collection(
                oqparam), []
        else:
            sitecol, assets_by_site = None, []
        if 'source_model_logic_tree' in oqparam.inputs:
            print('Reading the source model...')
            # pick the source processor matching the requested detail level
            if weightsources:
                sp = source.SourceFilterWeighter
            elif filtersources:
                sp = source.SourceFilter
            else:
                sp = source.BaseSourceProcessor  # do nothing
            csm = readinput.get_composite_source_model(oqparam, sitecol, sp)
            assoc = csm.get_rlzs_assoc()
            _print_info(assoc, oqparam, csm, sitecol,
                        filtersources, weightsources)
        if len(assets_by_site):
            # total number of assets in the exposure
            print('assets = %d'
                  % sum(len(assets) for assets in assets_by_site))
    else:
        print("No info for '%s'" % name)
from shapely import wkt from openquake.hazardlib.geo.point import Point from openquake.hazardlib.geo.surface import PlanarSurface from openquake.hazardlib.geo.geodetic import geodetic_distance from openquake.hazardlib.correlation import JB2009CorrelationModel from openquake.hazardlib.site import Site, SiteCollection from openquake.hazardlib.imt import from_string from openquake.hazardlib.gsim import get_available_gsims from openquake.hazardlib.gsim.base import ContextMaker from openquake.hazardlib.sourceconverter import RuptureConverter from openquake.hazardlib import nrml from smtk.residuals.gmpe_residuals import Residuals DEFAULT_CORRELATION = JB2009CorrelationModel(False) GSIM_LIST = get_available_gsims() def build_planar_surface(geometry): """ Builds the planar rupture surface from the openquake.nrmllib.models instance """ # Read geometry from wkt geom = wkt.loads(geometry.wkt) top_left = Point(geom.xy[0][0], geom.xy[1][0], geometry.upper_seismo_depth) top_right = Point(geom.xy[0][1], geom.xy[1][1], geometry.upper_seismo_depth)
# under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # OpenQuake is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU Affero General Public License # along with OpenQuake. If not, see <http://www.gnu.org/licenses/>. from openquake.hazardlib.gsim import get_available_gsims from openquake.commonlib import valid GSIMS = get_available_gsims() GROUND_MOTION_CORRELATION_MODELS = ['JB2009', 'Jayaram-Baker 2009'] HAZARD_CALCULATORS = ['classical', 'disaggregation', 'event_based', 'scenario'] RISK_CALCULATORS = [ 'classical_risk', 'event_based_risk', 'scenario_risk', 'classical_bcr', 'event_based_bcr', 'scenario_damage' ] EXPERIMENTAL_CALCULATORS = ['event_based_fr'] CALCULATORS = HAZARD_CALCULATORS + RISK_CALCULATORS + EXPERIMENTAL_CALCULATORS
def main(what, report=False):
    """
    Give information about the passed keyword or filename
    """
    # serial/processpool distribution only: other modes are not needed here
    if os.environ.get('OQ_DISTRIBUTE') not in ('no', 'processpool'):
        os.environ['OQ_DISTRIBUTE'] = 'processpool'
    if what == 'calculators':
        for calc in sorted(base.calculators):
            print(calc)
    elif what == 'gsims':
        for gs in gsim.get_available_gsims():
            print(gs)
    elif what == 'portable_gsims':
        for gs in gsim.get_portable_gsims():
            print(gs)
    elif what == 'imts':
        # IMTs are the uppercase function names in the imt module
        for im in vars(imt).values():
            if inspect.isfunction(im) and is_upper(im):
                print(im.__name__)
    elif what == 'views':
        for name in sorted(view):
            print(name)
    elif what == 'exports':
        # group the (exporter, format) pairs by exporter key
        dic = groupby(export, operator.itemgetter(0),
                      lambda group: [r[1] for r in group])
        items = [(DISPLAY_NAME.get(exporter, '?'), exporter, formats)
                 for exporter, formats in dic.items()]
        n = 0
        for dispname, exporter, formats in sorted(items):
            print(dispname, '"%s"' % exporter, formats)
            n += len(formats)
        print('There are %d exporters defined.' % n)
    elif what == 'extracts':
        # print the signature of each extract function, unwrapping
        # decorated or partial callables first
        for key in extract:
            func = extract[key]
            if hasattr(func, '__wrapped__'):
                fm = FunctionMaker(func.__wrapped__)
            elif hasattr(func, 'func'):  # for partial objects
                fm = FunctionMaker(func.func)
            else:
                fm = FunctionMaker(func)
            print('%s(%s)%s' % (fm.name, fm.signature, fm.doc))
    elif what == 'parameters':
        docs = OqParam.docs()
        names = set()
        for val in vars(OqParam).values():
            if hasattr(val, 'name'):
                names.add(val.name)
        params = sorted(names)
        for param in params:
            print(param)
            print(docs[param])
    elif what == 'mfds':
        for cls in gen_subclasses(BaseMFD):
            print(cls.__name__)
    elif what == 'venv':
        # path of the current virtual environment
        print(sys.prefix)
    elif what == 'sources':
        for cls in gen_subclasses(BaseSeismicSource):
            print(cls.__name__)
    elif what == 'consequences':
        known = scientific.KNOWN_CONSEQUENCES
        print('The following %d consequences are implemented:' % len(known))
        for cons in known:
            print(cons)
    elif os.path.isdir(what) and report:
        with Monitor('info', measuremem=True) as mon:
            with mock.patch.object(logging.root, 'info'):  # reduce logging
                do_build_reports(what)
        print(mon)
    elif what.endswith('.xml'):
        node = nrml.read(what)
        if node[0].tag.endswith('sourceModel'):
            print(source_model_info([node]))
        elif node[0].tag.endswith('logicTree'):
            # skip the (deprecated) branching-level wrapper, if present
            bset = node[0][0]
            if bset.tag.endswith("logicTreeBranchingLevel"):
                bset = bset[0]
            if bset.attrib['uncertaintyType'] == 'sourceModel':
                sm_nodes = []
                for smpath in logictree.collect_info(what).smpaths:
                    sm_nodes.append(nrml.read(smpath))
                print(source_model_info(sm_nodes))
            elif bset.attrib['uncertaintyType'] == 'gmpeModel':
                print(logictree.GsimLogicTree(what))
        else:
            print(node.to_str())
    elif what.endswith(('.ini', '.zip')):
        with Monitor('info', measuremem=True) as mon:
            if report:
                print('Generated', reportwriter.build_report(what))
            else:
                print(readinput.get_oqparam(what).json())
        # only report timing/memory for runs longer than one second
        if mon.duration > 1:
            print(mon)
    elif what:
        print("No info for '%s'" % what)
import sys
import numpy as np
import multiprocessing
from collections import OrderedDict
#from openquake.hazardlib.calc.hazard_curve import hazard_curves
from openquake.hazardlib.calc import hazard_curve
from openquake.hazardlib.site import Site, SiteCollection
from openquake.hazardlib.gsim import get_available_gsims
from openquake.hazardlib.gsim.base import ContextMaker
from openquake.hazardlib.calc import filters
from openquake.hazardlib import imt
from openquake.hazardlib.geo.point import Point
from hmtk.sources.source_model import mtkSourceModel

# default number of worker processes: one per available CPU
DEFAULT_WORKERS = multiprocessing.cpu_count()

# name -> class map of every GSIM known to OpenQuake
GSIM_MAP = get_available_gsims()


def _check_supported_imts(imts):
    """
    Checks that all of the IMTs in the list are supported

    Takes a list of IMT names (strings) and returns the corresponding
    list of instantiated IMT objects, raising ValueError for any name
    OpenQuake does not know about.
    """
    output_imts = []
    for imtx in imts:
        if imtx in imt.__all__:
            # plain IMT name (e.g. 'PGA'): instantiate it directly
            output_imts.append(imt.__dict__[imtx]())
        elif 'SA' in imtx:
            # spectral acceleration with a period, e.g. 'SA(0.2)'
            output_imts.append(imt.from_string(imtx))
        else:
            raise ValueError('IMT %s not supported in OpenQuake!' % imtx)
    return output_imts
import ast import logging import textwrap import collections from decimal import Decimal import numpy from openquake.baselib.python3compat import with_metaclass from openquake.baselib import hdf5 from openquake.hazardlib import imt, scalerel, gsim from openquake.baselib.general import distinct SCALEREL = scalerel.get_available_magnitude_scalerel() GSIM = gsim.get_available_gsims() # more tests are in tests/valid_test.py def gsim(value, **kwargs): """ Make sure the given value is the name of an available GSIM class. >>> gsim('BooreAtkinson2011') 'BooreAtkinson2011()' """ if value == 'FromFile': return 'FromFile' elif value.endswith('()'): value = value[:-2] # strip parenthesis try:
def gsims(cls):
    """Return a new dict mapping each gsim_name (string) to its
    gsim_class (class object), covering all Ground Shaking Intensity
    Models defined in OpenQuake."""
    # rebuild the mapping so callers get their own mutable copy
    return {name: gsim_class
            for name, gsim_class in get_available_gsims().items()}
Sets up a simple rupture-site configuration to allow for physical comparison of GMPEs ''' import numpy as np from collections import Iterable, OrderedDict from math import floor, ceil from sets import Set import matplotlib import matplotlib.pyplot as plt from openquake.hazardlib import gsim, imt from openquake.hazardlib.scalerel.wc1994 import WC1994 from smtk.sm_utils import _save_image, _save_image_tight import smtk.trellis.trellis_utils as utils from smtk.trellis.configure import GSIMRupture AVAILABLE_GSIMS = gsim.get_available_gsims() PARAM_DICT = {'magnitudes': [], 'distances': [], 'distance_type': 'rjb', 'vs30': [], 'strike': None, 'dip': None, 'rake': None, 'ztor': None, 'hypocentre_location': (0.5, 0.5), 'hypo_loc': (0.5, 0.5), 'msr': WC1994()} PLOT_UNITS = {'PGA': 'g', 'PGV': 'cm/s',
def get_gsims(trts, imts):
    '''Writes all Gsims from OpenQuake to the db

    `trts` and `imts` are the model instances already present in the db;
    every OpenQuake GSIM that cannot be mapped onto them is recorded as
    an Error row instead.  Returns the list of created Gsim instances.
    '''
    entity_type = ENTITIES[0][0]
    # lookup tables from OpenQuake names/keys to the db model instances
    trts_d = {_.oq_name: _ for _ in trts}
    imts_d = {_.key: _ for _ in imts}
    create_err = Error.objects.create  # pylint: disable=no-member
    create_gsim = Gsim.objects.create  # pylint: disable=no-member
    gsims = []
    with warnings.catch_warnings():
        # Catch warnings as if they were exceptions, and skip deprecation w.
        # https://stackoverflow.com/a/30368735
        warnings.filterwarnings('error')  # raises every time, not only 1st
        for key, gsim in get_available_gsims().items():
            if inspect.isabstract(gsim):
                continue
            warning = ''
            needs_args = False
            try:
                gsim_inst = gsim()
            except TypeError:
                # the constructor requires arguments: keep the class and
                # flag it accordingly
                gsim_inst = gsim
                needs_args = True
            except (OSError, NotImplementedError) as exc:
                create_err(type=exc.__class__.__name__,
                           message=str(exc),
                           entity_type=entity_type,
                           entity_key=key)
                continue
            except OQDeprecationWarning as warn:
                # the builtin DeprecationWarning is silenced, OQ uses its own
                create_err(type=warn.__class__.__name__,
                           message=str(warn),
                           entity_type=entity_type,
                           entity_key=key)
                continue
            # except NotVerifiedWarning as warn:
            #     warning = str(warn)
            except Warning as warn:
                # any other warning is stored on the Gsim row, not fatal
                warning = str(warn)
            try:
                gsim_imts = gsim_inst.DEFINED_FOR_INTENSITY_MEASURE_TYPES
            except AttributeError as exc:
                create_err(type=exc.__class__.__name__,
                           message=str(exc),
                           entity_type=entity_type,
                           entity_key=key)
                continue
            if not gsim_imts and hasattr(gsim_imts, '__iter__'):
                create_err(type=Exception.__name__,
                           message='No IMT defined',
                           entity_type=entity_type,
                           entity_key=key)
                continue
            try:
                trt = trts_d[gsim_inst.DEFINED_FOR_TECTONIC_REGION_TYPE]
            except KeyError:
                create_err(type=Exception.__name__,
                           message='%s is not a valid TRT'
                           % str(gsim_inst.DEFINED_FOR_TECTONIC_REGION_TYPE),
                           entity_type=entity_type,
                           entity_key=key)
                continue
            except AttributeError:
                create_err(type=Exception.__name__,
                           message='No TRT defined',
                           entity_type=entity_type,
                           entity_key=key)
                continue
            # convert gsim imts (classes) into strings:
            gsim_imts = [_.__name__ for _ in gsim_imts]
            # and now convert to Imt model instances, dropping unknown ones:
            gsim_imts = [imts_d[_] for _ in gsim_imts if _ in imts_d]
            if not gsim_imts:
                create_err(type=Exception.__name__,
                           message='No IMT in %s' % str([_.key for _ in imts]),
                           entity_type=entity_type,
                           entity_key=key)
                continue
            gsim = create_gsim(key=key, trt=trt, warning=warning,
                               needs_args=needs_args)
            gsim.imts.set(gsim_imts)
            gsims.append(gsim)
    return gsims
import numpy as np from copy import deepcopy from openquake.hazardlib.imt import PGA, PGV, SA from openquake.hazardlib.site import Site, SiteCollection from openquake.hazardlib.geo.point import Point from openquake.hazardlib import gsim from openquake.hazardlib.pmf import PMF from openquake.hazardlib.geo.nodalplane import NodalPlane from openquake.hazardlib.scalerel.wc1994 import WC1994 from openquake.hazardlib.mfd.truncated_gr import TruncatedGRMFD from openquake.hazardlib.tom import PoissonTOM from openquake.hazardlib.source.point import PointSource from openquake.hazardlib.calc.hazard_curve import calc_hazard_curves import hmtk.hazard as haz SUPPORTED_GSIMS = gsim.get_available_gsims() #class TestCheckSupportedIMTs(unittest.TestCase): # """ # Checks the pre-processor for ensuring IMT input is correctly formatted # """ # def setUp(self): # """ # # """ # self.imt_list = None # # def test_correct_input(self): # """ # Checks the output when a correctly formatted list of IMTs is passed # """
} } TAB_STYLE = {'height': '100vh', 'width': '100vw'} IMC_MAPPINGS = { 'rotd50.0': 'ROTD50.0', 'arithmetic_mean': 'Arithmetic Mean', 'geometric_mean': 'Geometric Mean', 'quadratic_mean': 'Quadratic Mean' } MAPBOX_ACCESS_TOKEN = ('pk.eyJ1IjoianJla29za2UiLCJhIjoiY2p5amJ3ZnJpMDIzbjNlbjd' 'jdXJ5bTQ5MSJ9.QjIZKHgkxz21YOKkxLBOVQ') IMT_REGEX = re.compile(r'PGA|PGV|SA\(*|FAS\(*|ARIAS|DURATION') DIST_REGEX = re.compile(r'.*Distance$') MODELS_DICT = {} for item in get_available_gsims().items(): with warnings.catch_warnings(record=True) as caught_warnings: try: item[1]() except Exception: continue if not caught_warnings: MODELS_DICT[item[0]] = item[1] ALL_PARAMS = [ param for param_type in DEFAULT_PARAMS.keys() for param in DEFAULT_PARAMS[param_type] ] AZIMUTH = 0 DIST_DICT = { 'EpicentralDistance': ('repi', 'Epicentral distance (km)'), 'HypocentralDistance': ('rhypo', 'Hypocentral distance (km)'),
import subprocess import h5py import numpy as np import pandas as pd from openquake.hazardlib.geo import Point, PlanarSurface, MultiSurface, Mesh from openquake.hazardlib.gsim import get_available_gsims from openquake.hazardlib.imt import PGA, PGV, SA, from_string from openquake.hazardlib.contexts import (ContextMaker, get_distances, SitesContext, DistancesContext) from openquake.hazardlib.source.rupture import ParametricProbabilisticRupture from openquake.hazardlib.site import Site, SiteCollection from openquake.hazardlib.scalerel.wc1994 import WC1994 from openquake.hazardlib import const import synthetic_rupture_generator as srg GSIM_SET = get_available_gsims() def create_planar_surface(top_centroid, strike, dip, area, aspect): """ Given a central location, create a simple planar rupture :param top_centroid: Centroid of trace of the rupture, as instance of :class: openquake.hazardlib.geo.point.Point :param float strike: Strike of rupture(Degrees) :param float dip: Dip of rupture (degrees) :param float area: Area of rupture (km^2) :param float aspect:
def compute_cs(t_cs, bgmpe, sctx, rctx, dctx, im_type, t_star, rrup, mag,
               avg_periods, corr_type, im_star, gmpe_input):
    """
    Compute the conditional spectrum according to the procedure outlined
    in Baker JW, Lee C. An Improved Algorithm for Selecting Ground Motions
    to Match a Conditional Spectrum. J Earthq Eng 2018;22:708-23.
    https://doi.org/10.1080/13632469.2016.1264334.

    When the IM and the GMM are defined for the maximum of the two
    horizontal components, the Boore and Kishida (2017) relationship is
    applied to convert the maximum of the two horizontal components into
    `RotD50`. This is done only for `PGA` and `SA`.

    :param t_cs: sequence of spectral periods at which the conditional
        spectrum is evaluated (0. is treated as PGA)
    :param bgmpe: GMPE *class* (not instance) from openquake.hazardlib.gsim
    :param sctx: OpenQuake SitesContext for the scenario
    :param rctx: OpenQuake RuptureContext for the scenario
    :param dctx: OpenQuake DistancesContext for the scenario
    :param im_type: conditioning IM type: 'PGA', 'SA' or 'AvgSA'
    :param t_star: conditioning period (used when im_type is 'SA')
    :param rrup: rupture distance(s), passed to the Boore-Kishida conversion
    :param mag: magnitude of the scenario
    :param avg_periods: periods used when im_type is 'AvgSA'
    :param corr_type: correlation model name: 'baker_jayaram' or 'akkar'
    :param im_star: target IM level from the PSHA (used to compute epsilon)
    :param gmpe_input: GMPE name (str) used to build the AvgSA meta-GMPE
    :returns: tuple (mu_im_im_cond, cov, stdevs) with the conditional mean
        spectrum (log space), the covariance matrix and its diagonal stddevs
    """
    import numpy as np
    import sys
    from openquake.hazardlib import imt, const, gsim
    from .compute_avgSA import compute_rho_avgsa
    from .modified_akkar_correlation_model import ModifiedAkkarCorrelationModel

    # Use the same periods as the available spectra to construct the
    # conditional spectrum
    p = []
    s = [const.StdDev.TOTAL]  # only the total standard deviation is used
    if im_type == 'AvgSA':
        # NOTE(review): the return value of get_available_gsims() is
        # discarded; presumably the call is kept for its side effect of
        # registering the GSIM classes — confirm whether it is needed at all.
        _ = gsim.get_available_gsims()
        p = imt.AvgSA()
        # Build the average-SA meta-GMPE wrapping the requested GMPE
        mgmpe = gsim.mgmpe.generic_gmpe_avgsa.GenericGmpeAvgSA \
            (gmpe_name=gmpe_input, avg_periods=avg_periods,
             corr_func=corr_type)
        mu_im_cond, sigma_im_cond = mgmpe.get_mean_and_stddevs(
            sctx, rctx, dctx, p, s)
    else:
        if im_type == 'PGA':
            p = imt.PGA()
        else:
            p = imt.SA(t_star)
        s = [const.StdDev.TOTAL]
        mu_im_cond, sigma_im_cond = bgmpe().get_mean_and_stddevs(
            sctx, rctx, dctx, p, s)
    # get_mean_and_stddevs returns the stddevs as a list (one entry per
    # requested stddev type); keep the total-stddev array only
    sigma_im_cond = sigma_im_cond[0]

    if (bgmpe.DEFINED_FOR_INTENSITY_MEASURE_COMPONENT ==
            'Greater of two horizontal'):
        # Convert "greater of two horizontal" amplitudes/sigmas to RotD50;
        # the conversion relationship exists only for PGA and SA
        if im_type == 'PGA' or im_type == 'SA':
            from shakelib.conversions.imc.boore_kishida_2017 import \
                BooreKishida2017
            bk17 = BooreKishida2017(const.IMC.GREATER_OF_TWO_HORIZONTAL,
                                    const.IMC.RotD50)
            mu_im_cond = bk17.convertAmps(p, mu_im_cond, rrup, float(mag))
            # NOTE(review): sigma_im_cond was already reduced with [0] above,
            # so this indexes into the array itself — confirm the intended
            # argument shape against convertSigmas
            sigma_im_cond = bk17.convertSigmas(p, sigma_im_cond[0])
        else:
            sys.exit('Error: conversion between intensity measures is not '
                     'possible for AvgSA')

    # Compute how many standard deviations the PSHA differs from
    # the GMPE value (epsilon of the conditioning IM, in log space)
    epsilon = (np.log(im_star) - mu_im_cond) / sigma_im_cond

    mu_im = np.zeros(len(t_cs))
    sigma_im = np.zeros(len(t_cs))
    rho_t_tstar = np.zeros(len(t_cs))
    mu_im_im_cond = np.zeros(len(t_cs))

    for i in range(len(t_cs)):
        # Get the GMPE output for a rupture scenario at each target period
        if t_cs[i] == 0.:
            p = imt.PGA()
        else:
            p = imt.SA(t_cs[i])
        s = [const.StdDev.TOTAL]
        mu0, sigma0 = bgmpe().get_mean_and_stddevs(sctx, rctx, dctx, p, s)

        if (bgmpe.DEFINED_FOR_INTENSITY_MEASURE_COMPONENT ==
                'Greater of two horizontal'):
            if im_type == 'PGA' or im_type == 'SA':
                # Same component conversion as above, applied per period
                from shakelib.conversions.imc.boore_kishida_2017 \
                    import BooreKishida2017
                bk17 = BooreKishida2017(const.IMC.GREATER_OF_TWO_HORIZONTAL,
                                        const.IMC.RotD50)
                mu0 = bk17.convertAmps(p, mu0, rrup, float(mag))
                sigma0 = bk17.convertSigmas(p, sigma0[0])

        mu_im[i] = mu0[0]
        sigma_im[i] = sigma0[0][0]

        # Correlation between the spectral ordinate at t_cs[i] and the
        # conditioning IM
        rho = None
        if im_type == 'AvgSA':
            rho = compute_rho_avgsa(t_cs[i], avg_periods, sctx, rctx,
                                    dctx, sigma_im_cond, bgmpe, corr_type)
            rho = rho[0]
        else:
            if corr_type == 'baker_jayaram':
                rho = gsim.mgmpe.generic_gmpe_avgsa. \
                    BakerJayaramCorrelationModel([t_cs[i], t_star])(0, 1)
            if corr_type == 'akkar':
                rho = ModifiedAkkarCorrelationModel([t_cs[i], t_star])(0, 1)
        rho_t_tstar[i] = rho
        # Get the value of the CMS (conditional mean spectrum)
        mu_im_im_cond[i] = \
            mu_im[i] + rho_t_tstar[i] * epsilon[0] * sigma_im[i]

    # Compute covariances and correlations at all periods
    cov = np.zeros((len(t_cs), len(t_cs)))
    for i in np.arange(len(t_cs)):
        for j in np.arange(len(t_cs)):
            var1 = sigma_im[i]**2
            var2 = sigma_im[j]**2
            var_tstar = sigma_im_cond**2

            sigma_corr = []
            if corr_type == 'baker_jayaram':
                sigma_corr = gsim.mgmpe.generic_gmpe_avgsa. \
                    BakerJayaramCorrelationModel([t_cs[i], t_cs[j]])(0, 1) * \
                    np.sqrt(var1 * var2)
            if corr_type == 'akkar':
                sigma_corr = ModifiedAkkarCorrelationModel(
                    [t_cs[i], t_cs[j]])(0, 1) * \
                    np.sqrt(var1 * var2)

            # Conditional covariance: sigma11 - sigma12 * sigma22^-1 * sigma12'
            sigma11 = np.matrix([[var1, sigma_corr], [sigma_corr, var2]])
            sigma22 = np.array(var_tstar)
            sigma12 = np.array([
                rho_t_tstar[i] * np.sqrt(var1 * var_tstar),
                rho_t_tstar[j] * np.sqrt(var_tstar * var2)
            ])
            sigma_cond = sigma11 - sigma12 * 1. / (sigma22) * sigma12.T
            cov[i, j] = sigma_cond[0, 1]

    # find covariance values of zero and set them to a small number
    # so that random number generation can be performed
    cov[np.absolute(cov) < 1e-10] = 1e-10
    stdevs = np.sqrt(np.diagonal(cov))

    return mu_im_im_cond, cov, stdevs
from openquake.hazardlib.scalerel.wc1994 import WC1994 from smtk.sm_utils import _save_image, _save_image_tight import smtk.trellis.trellis_utils as utils from smtk.trellis.configure import GSIMRupture # Default - defines a 21 color and line-type cycle matplotlib.rcParams["axes.prop_cycle"] = \ cycler(u'color', [u'b', u'g', u'r', u'c', u'm', u'y', u'k', u'b', u'g', u'r', u'c', u'm', u'y', u'k', u'b', u'g', u'r', u'c', u'm', u'y', u'k']) +\ cycler(u'linestyle', ["-", "-", "-", "-", "-", "-", "-", "--", "--", "--", "--", "--", "--", "--", "-.", "-.", "-.", "-.", "-.", "-.", "-."]) # Get a list of the available GSIMs AVAILABLE_GSIMS = gsim.get_available_gsims() # Generic dictionary of parameters needed for a trellis calculation PARAM_DICT = { 'magnitudes': [], 'distances': [], 'distance_type': 'rjb', 'vs30': [], 'strike': None, 'dip': None, 'rake': None, 'ztor': None, 'hypocentre_location': (0.5, 0.5), 'hypo_loc': (0.5, 0.5), 'msr': WC1994() }
def main(what, report=False):
    """
    Give information about the passed keyword or filename.

    :param what: one of the recognized keywords ('calculators', 'gsims',
        'imts', 'views', 'exports', 'extracts', 'parameters', 'mfds',
        'sources'), or a directory, an .xml file, or a .ini/.zip job file
    :param report: if True, build a report for a directory or job file
        instead of just printing information
    """
    # Force a lightweight distribution mode for this informational command,
    # unless the user already chose 'no' or 'processpool'
    if os.environ.get('OQ_DISTRIBUTE') not in ('no', 'processpool'):
        os.environ['OQ_DISTRIBUTE'] = 'processpool'
    if what == 'calculators':
        for calc in sorted(base.calculators):
            print(calc)
    elif what == 'gsims':
        for gs in gsim.get_available_gsims():
            print(gs)
    elif what == 'imts':
        for im in gen_subclasses(imt.IMT):
            print(im.__name__)
    elif what == 'views':
        for name in sorted(view):
            print(name)
    elif what == 'exports':
        # group the (exporter, format) pairs by exporter name
        dic = groupby(export, operator.itemgetter(0),
                      lambda group: [r[1] for r in group])
        n = 0
        for exporter, formats in dic.items():
            print(exporter, formats)
            n += len(formats)
        print('There are %d exporters defined.' % n)
    elif what == 'extracts':
        for key in extract:
            func = extract[key]
            # unwrap decorated or partial callables to show the real signature
            if hasattr(func, '__wrapped__'):
                fm = FunctionMaker(func.__wrapped__)
            elif hasattr(func, 'func'):  # for partial objects
                fm = FunctionMaker(func.func)
            else:
                fm = FunctionMaker(func)
            print('%s(%s)%s' % (fm.name, fm.signature, fm.doc))
    elif what == 'parameters':
        # collect the named descriptors defined on OqParam, sorted by name
        params = []
        for val in vars(OqParam).values():
            if hasattr(val, 'name'):
                params.append(val)
        params.sort(key=lambda x: x.name)
        for param in params:
            print(param.name)
    elif what == 'mfds':
        for cls in gen_subclasses(BaseMFD):
            print(cls.__name__)
    elif what == 'sources':
        for cls in gen_subclasses(BaseSeismicSource):
            print(cls.__name__)
    elif os.path.isdir(what) and report:
        # build reports for every job file found in the directory
        with Monitor('info', measuremem=True) as mon:
            with mock.patch.object(logging.root, 'info'):  # reduce logging
                do_build_reports(what)
        print(mon)
    elif what.endswith('.xml'):
        node = nrml.read(what)
        if node[0].tag.endswith('sourceModel'):
            print(source_model_info([node]))
        elif node[0].tag.endswith('logicTree'):
            # read every source model referenced by the logic tree
            sm_nodes = []
            for smpath in logictree.collect_info(what).smpaths:
                sm_nodes.append(nrml.read(smpath))
            print(source_model_info(sm_nodes))
        else:
            print(node.to_str())
    elif what.endswith(('.ini', '.zip')):
        with Monitor('info', measuremem=True) as mon:
            if report:
                print('Generated', reportwriter.build_report(what))
            else:
                print(readinput.get_oqparam(what).json())
        # only show timing/memory info when the operation was not trivial
        if mon.duration > 1:
            print(mon)
    elif what:
        print("No info for '%s'" % what)
def get_available_gsims(request):
    """
    Return a list of strings with the available GSIMs
    """
    gsim_names = [name for name in gsim.get_available_gsims()]
    payload = json.dumps(gsim_names)
    return HttpResponse(content=payload, content_type=JSON)
import h5py import numpy as np from math import sqrt, ceil from scipy.special import erf from scipy.stats import scoreatpercentile, norm from copy import deepcopy from collections import OrderedDict from openquake.hazardlib.gsim import get_available_gsims from openquake.hazardlib.gsim.gsim_table import GMPETable import smtk.intensity_measures as ims from openquake.hazardlib import imt from smtk.strong_motion_selector import SMRecordSelector from smtk.trellis.trellis_plots import _get_gmpe_name, _check_gsim_list GSIM_LIST = get_available_gsims() GSIM_KEYS = set(GSIM_LIST.keys()) #SCALAR_IMTS = ["PGA", "PGV", "PGD", "CAV", "Ia"] SCALAR_IMTS = ["PGA", "PGV"] STDDEV_KEYS = ["Mean", "Total", "Inter event", "Intra event"] def _check_gsim_list(gsim_list): """ Checks the list of GSIM models and returns an instance of the openquake.hazardlib.gsim class. Raises error if GSIM is not supported in OpenQuake :param list gsim_list: List of GSIM names (str) :returns:
# under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # OpenQuake is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU Affero General Public License # along with OpenQuake. If not, see <http://www.gnu.org/licenses/>. from openquake.hazardlib.gsim import get_available_gsims from openquake.commonlib import valid GSIMS = get_available_gsims() GROUND_MOTION_CORRELATION_MODELS = [ 'JB2009', 'Jayaram-Baker 2009'] HAZARD_CALCULATORS = [ 'classical', 'disaggregation', 'event_based', 'scenario'] RISK_CALCULATORS = [ 'classical_risk', 'event_based_risk', 'scenario_risk', 'classical_bcr', 'event_based_bcr', 'scenario_damage'] EXPERIMENTAL_CALCULATORS = [ 'event_based_fr'] CALCULATORS = HAZARD_CALCULATORS + RISK_CALCULATORS + EXPERIMENTAL_CALCULATORS
def inizialize_gmm(index, gmpe_input, rjb, mag, z_hyp_input, dip_input, rake,
                   upper_sd_input, lower_sd_input, azimuth_input, fhw,
                   vs30type, vs30_input, z2pt5_input, z1pt0_input, site_code):
    """
    Defines all the parameters for the computation of a Ground Motion Model.

    If not defined by the user as input parameters, most parameters (dip,
    hypocentral depth, fault width, ztor, azimuth, source-to-site distances
    based on extended sources, z2pt5, z1pt0) are defined according to the
    relationships included in: Kaklamanos J, Baise LG, Boore DM. (2011)
    Estimating unknown input parameters when implementing the NGA
    ground-motion prediction equations in engineering practice.
    Earthquake Spectra 27: 1219-1235. https://doi.org/10.1193/1.3650372.

    :returns: tuple (bgmpe, sctx, rctx, dctx, vs30, rrup) with the selected
        GMPE class, the populated OpenQuake contexts, the vs30 array and
        the rupture distances
    """
    import sys
    from openquake.hazardlib import gsim
    import numpy as np

    # Look up the requested GMPE class by name among the available GSIMs;
    # exit with an error message if the name is unknown
    bgmpe = None
    for name_gmpe, gmpes in gsim.get_available_gsims().items():
        if name_gmpe == gmpe_input:
            bgmpe = gmpes
    if bgmpe is None:
        sys.exit('The GMM is not found')

    sctx = gsim.base.SitesContext()
    rctx = gsim.base.RuptureContext()
    dctx = gsim.base.DistancesContext()

    # -------------------------------------------------------------------------
    # Initialise contexts
    # NOTE(review): this compares a GMPE *instance* with a string; it relies
    # on hazardlib's GMPE equality/str semantics ('[ClassName]') — confirm
    # that the Ambraseys1996 Ms-to-Mw conversion branch is actually reachable
    if (bgmpe() == '[Ambraseys1996]'):
        # convert surface-wave magnitude to moment magnitude
        mw = np.exp(1.421 + 0.108 * mag) - 1.863
    else:
        mw = mag

    # Derive missing source geometry per Kaklamanos et al. (2011)
    dip, z_hyp, width, ztor, azimuth = compute_source_params(
        mw, z_hyp_input, dip_input, rake, upper_sd_input, lower_sd_input,
        azimuth_input, fhw)

    # Source-to-site distance measures derived from rjb
    [rx, rrup, ry] = compute_dists(rjb, mw, z_hyp_input, dip_input, rake,
                                   upper_sd_input, lower_sd_input,
                                   azimuth_input, fhw)

    # Site parameters (vs30, basin depths) derived from the inputs
    [vs30, vs30measured, z1pt0, z2pt5] = compute_soil_params(
        vs30_input, z2pt5_input, z1pt0_input, gmpe_input, vs30type, index)

    # Populate the rupture, distance and site contexts; scalar site values
    # are broadcast to arrays matching the rjb shape
    setattr(rctx, 'width', width)
    setattr(rctx, 'ztor', ztor)
    setattr(rctx, 'dip', dip)
    setattr(dctx, 'rx', rx)
    setattr(dctx, 'rrup', rrup)
    setattr(dctx, 'ry0', ry)
    z1pt0 = z1pt0 + np.zeros(rjb.shape)
    setattr(sctx, 'z1pt0', z1pt0)
    z2pt5 = z2pt5 + np.zeros(rjb.shape)
    setattr(sctx, 'z2pt5', z2pt5)
    setattr(sctx, 'vs30measured', vs30measured)
    setattr(rctx, 'mag', mag)
    setattr(rctx, 'hypo_depth', z_hyp)
    setattr(rctx, 'rake', rake)
    setattr(rctx, 'occurrence_rate', 0.)
    setattr(dctx, 'rjb', rjb)
    vs30 = vs30 + np.zeros(rjb.shape)
    setattr(sctx, 'vs30', vs30)
    # NOTE(review): site ids are built by broadcasting site_code[index];
    # presumably site_code holds numeric codes — confirm against the caller
    sc = site_code[index] + np.zeros(rjb.shape)
    setattr(sctx, 'sids', sc)
    return bgmpe, sctx, rctx, dctx, vs30, rrup
import logging import textwrap import collections from decimal import Decimal import numpy from openquake.baselib.python3compat import with_metaclass from openquake.baselib.general import distinct from openquake.baselib import hdf5 from openquake.hazardlib import imt, scalerel, gsim from openquake.hazardlib.calc import disagg from openquake.hazardlib.calc.filters import IntegrationDistance SCALEREL = scalerel.get_available_magnitude_scalerel() GSIM = gsim.get_available_gsims() disagg_outs = ['_'.join(tup) for tup in sorted(disagg.pmf_map)] def disagg_outputs(value): """ Validate disaggregation outputs. For instance >>> disagg_outputs('TRT Mag_Dist') ['TRT', 'Mag_Dist'] """ values = value.split() for val in values: if val not in disagg_outs: raise ValueError('Invalid disagg output: %s' % val)
def info(calculators, gsims, views, exports, extracts, parameters,
         report, input_file=''):
    """
    Give information. You can pass the name of an available calculator,
    a job.ini file, or a zip archive with the input files.

    :param calculators: if True, list the available calculators
    :param gsims: if True, list the available GSIMs
    :param views: if True, list the available datastore views
    :param exports: if True, list the available exporters
    :param extracts: if True, list the available extract functions
    :param parameters: if True, list the OqParam parameter names
    :param report: if True, build a report for a directory or job file
    :param input_file: optional path to a directory, .xml, .ini or .zip file
    """
    if calculators:
        for calc in sorted(base.calculators):
            print(calc)
    if gsims:
        for gs in gsim.get_available_gsims():
            print(gs)
    if views:
        for name in sorted(view):
            print(name)
    if exports:
        # group the (exporter, format) pairs by exporter name
        dic = groupby(export, operator.itemgetter(0),
                      lambda group: [r[1] for r in group])
        n = 0
        for exporter, formats in dic.items():
            print(exporter, formats)
            n += len(formats)
        print('There are %d exporters defined.' % n)
    if extracts:
        for key in extract:
            func = extract[key]
            # unwrap decorated callables to show the real signature
            if hasattr(func, '__wrapped__'):
                fm = FunctionMaker(func.__wrapped__)
            else:
                fm = FunctionMaker(func)
            print('%s(%s)%s' % (fm.name, fm.signature, fm.doc))
    if parameters:
        # collect the named descriptors defined on OqParam, sorted by name
        params = []
        for val in vars(OqParam).values():
            if hasattr(val, 'name'):
                params.append(val)
        params.sort(key=lambda x: x.name)
        for param in params:
            print(param.name)
    if os.path.isdir(input_file) and report:
        # build reports for every job file found in the directory
        with Monitor('info', measuremem=True) as mon:
            with mock.patch.object(logging.root, 'info'):  # reduce logging
                do_build_reports(input_file)
        print(mon)
    elif input_file.endswith('.xml'):
        node = nrml.read(input_file)
        if node[0].tag.endswith('sourceModel'):
            # NRML 0.4 source models are not supported here; tell the user
            # how to upgrade instead of failing obscurely later
            if node['xmlns'].endswith('nrml/0.4'):
                raise InvalidFile(
                    '%s is in NRML 0.4 format, please run the following '
                    'command:\noq upgrade_nrml %s' % (
                        input_file, os.path.dirname(input_file) or '.'))
            print(source_model_info([node[0]]))
        elif node[0].tag.endswith('logicTree'):
            # read every source model referenced by the logic tree
            nodes = [nrml.read(sm_path)[0]
                     for sm_path in logictree.collect_info(input_file).smpaths]
            print(source_model_info(nodes))
        else:
            print(node.to_str())
    elif input_file.endswith(('.ini', '.zip')):
        with Monitor('info', measuremem=True) as mon:
            if report:
                print('Generated', reportwriter.build_report(input_file))
            else:
                print_csm_info(input_file)
        # only show timing/memory info when the operation was not trivial
        if mon.duration > 1:
            print(mon)
    elif input_file:
        print("No info for '%s'" % input_file)