Exemple #1
0
 def test_show_attrs(self):
     """Check that `show_attrs sitecol` prints the expected attributes."""
     with Print.patch() as printed:
         sap.runline(
             f'openquake.commands show_attrs sitecol {self.calc_id}')
     expected = ('__pdcolumns__ sids lon lat depth vs30 vs30measured\n'
                 '__pyclass__ openquake.hazardlib.site.SiteCollection')
     self.assertEqual(expected, str(printed))
Exemple #2
0
    def test(self):
        """End-to-end checks for the `prepare_site_model` command."""
        src_dir = os.path.dirname(case_16.__file__)
        out_csv = gettemp(suffix='.csv')
        exposure_xml = os.path.join(src_dir, 'exposure.xml')
        vs30_csv = os.path.join(src_dir, 'vs30.csv')
        spacing = 50

        # with a grid: the output CSV must match the returned site collection
        sitecol = sap.runline('openquake.commands prepare_site_model '
                              f'-123 {vs30_csv} -e {exposure_xml} '
                              f'-g {spacing} --assoc-distance=5 '
                              f'-o {out_csv}')
        site_model = read_csv(
            out_csv, {None: float, 'vs30measured': numpy.uint8})
        self.assertEqual(site_model['vs30measured'].sum(), 0)
        self.assertEqual(len(sitecol), 84)  # 84 non-empty grid points
        self.assertEqual(len(sitecol), len(site_model))

        # without a grid
        sc = sap.runline('openquake.commands prepare_site_model '
                         f'{vs30_csv} -e {exposure_xml}')
        self.assertEqual(len(sc), 148)  # 148 sites within 5 km from the params

        # sites_csv == vs30_csv and no grid
        sc = sap.runline('openquake.commands prepare_site_model '
                         f'{vs30_csv} -s{vs30_csv} -12 -a5 -o {out_csv}')

        # sites_csv == vs30_csv and a grid spacing
        sc = sap.runline('openquake.commands prepare_site_model '
                         f'{vs30_csv} -s{vs30_csv} -12 -g10 -a5 -o {out_csv}')
Exemple #3
0
 def test_export_calc(self):
     """Export hazard curves as CSV and check the file name is printed."""
     export_dir = tempfile.mkdtemp()
     with Print.patch() as printed:
         sap.runline('openquake.commands export hcurves -e csv '
                     f'--export-dir={export_dir}')
     exported = os.listdir(export_dir)
     self.assertIn(str(exported[0]), str(printed))
     shutil.rmtree(export_dir)
Exemple #4
0
 def test_sensitivity(self):
     """
     Test the sensitivity of the UHS from the area_source_discretization
     parameter by comparing two runs of the same job.
     """
     job_ini = os.path.join(os.path.dirname(case_56.__file__), 'job.ini')
     sap.runline(f'openquake.commands engine --run {job_ini} -c 0')
     with Print.patch() as p:
         sap.runline('openquake.commands compare uhs -1 -2')
     # NOTE: removed a leftover debug print(p); assertIn already shows
     # the captured output when the assertion fails
     self.assertIn('rms-diff', str(p))
Exemple #5
0
 def test_extract_sitecol(self):
     """Extract the site collection and check the file name is printed."""
     extract_dir = tempfile.mkdtemp()
     with Print.patch() as printed:
         sap.runline("openquake.commands extract sitecol "
                     f"{self.calc_id} --extract-dir={extract_dir}")
     extracted = os.listdir(extract_dir)
     self.assertIn(str(extracted[0]), str(printed))
     shutil.rmtree(extract_dir)
Exemple #6
0
 def test_source_model(self):
     """Sample half of the nodes of a copy of the source model."""
     work_dir = tempfile.mkdtemp()
     shutil.copy(os.path.join(self.TESTDIR, 'source_model.xml'), work_dir)
     target = os.path.join(work_dir, 'source_model.xml')
     with Print.patch() as printed:
         sap.runline(f'openquake.commands sample {target} 0.5')
     self.assertIn('Extracted 9 nodes out of 15', str(printed))
     shutil.rmtree(work_dir)
Exemple #7
0
 def test_sites_csv(self):
     """Sample half of the lines of a copy of sites.csv."""
     data_dir = os.path.dirname(case_5.__file__)
     work_dir = tempfile.mkdtemp()
     shutil.copy(os.path.join(data_dir, 'sites.csv'), work_dir)
     target = os.path.join(work_dir, 'sites.csv')
     with Print.patch() as printed:
         sap.runline(f'openquake.commands sample {target} 0.5')
     self.assertIn('Extracted 50 lines out of 99', str(printed))
     shutil.rmtree(work_dir)
Exemple #8
0
 def test_roundtrip_valid_05(self):
     """Convert a valid NRML 0.5 file to shapefile and back."""
     ssm = os.path.join(os.path.dirname(__file__), "data",
                        "sample_source_model_05.xml")
     sap.runline(f'openquake.commands to_shapefile -v {ssm} -o{self.out}')
     shpfiles = ' '.join(os.path.join(self.OUTDIR, name)
                         for name in os.listdir(self.OUTDIR))
     sap.runline(
         f'openquake.commands from_shapefile -v {shpfiles} -o{self.out}')
Exemple #9
0
 def test_db(self):
     """Run some db commands, bypassing the dbserver via a fake dbcmd."""
     # test some db commands bypassing the dbserver
     with Print.patch(), mock.patch('openquake.commonlib.logs.dbcmd',
                                    manage.fakedbcmd):
         sap.runline('openquake.commands db db_version')
         try:
             sap.runline('openquake.commands db calc_info 1')
         except dbapi.NotFound:  # happens on an empty db
             pass
Exemple #10
0
 def test_shakemap(self):
     """Zip a ShakeMap job referencing a *.npy file via a relative path."""
     inp = os.path.join(tempfile.mkdtemp(), 'inp')
     shutil.copytree(os.path.dirname(case_shakemap.__file__), inp)
     sap.runline(f'openquake.commands zip {inp}/job.ini {inp}/job.zip')
     archive = os.path.join(inp, 'job.zip')
     archived = sorted(zipfile.ZipFile(archive).namelist())
     self.assertIn('shakefile/usp000fjta.npy', archived)
     shutil.rmtree(inp)
Exemple #11
0
 def test_shapefile_zipped(self):
     """Zip a job using a shapefile archive, as done for ShakeMaps."""
     inp = os.path.join(tempfile.mkdtemp(), 'inp')
     shutil.copytree(os.path.dirname(case_shapefile.__file__), inp)
     sap.runline(
         f'openquake.commands zip {inp}/job_zipped.ini {inp}/job.zip')
     archive = os.path.join(inp, 'job.zip')
     archived = sorted(zipfile.ZipFile(archive).namelist())
     self.assertIn('shp/shapefiles.zip', archived)
     shutil.rmtree(inp)
Exemple #12
0
 def test_nrml_to_csv(self):
     """Convert a mixed NRML source model to CSV in chatty mode."""
     out_dir = tempfile.mkdtemp()
     with Print.patch() as printed:
         sap.runline(f'openquake.commands nrml_to csv {MIXED_SRC_MODEL} '
                     f'--outdir={out_dir} --chatty')
     captured = str(printed)
     # all three geometry kinds must be reported
     for geom in ('3D MultiPolygon', '3D MultiLineString', 'Point'):
         self.assertIn(geom, captured)
     shutil.rmtree(out_dir)
Exemple #13
0
 def test_zip_gmf_ebrisk(self):
     """Zip a risk job without gsims but with a GMF file."""
     ini = os.path.join(os.path.dirname(ebrisk.__file__), 'job_risk.ini')
     work_dir = tempfile.mkdtemp()
     archive = os.path.join(work_dir, 'x.zip')
     sap.runline(f'openquake.commands zip {ini} {archive}')
     expected = ['exposure_model.xml', 'gmf_scenario.csv',
                 'job_risk.ini', 'sites.csv', 'vulnerability.xml']
     self.assertEqual(expected,
                      sorted(zipfile.ZipFile(archive).namelist()))
     shutil.rmtree(work_dir)
Exemple #14
0
    def test_logictree(self):
        """Check the `info` output for a source model logic tree file."""
        path = os.path.join(os.path.dirname(case_9.__file__),
                            'source_model_logic_tree.xml')
        with Print.patch() as p:
            sap.runline('openquake.commands info ' + path)
        # NB: the expected table contains significant trailing spaces
        self.assertEqual(str(p), """\
==================== ===========
TRT                  pointSource
==================== ===========
active shallow crust 1          
Total                1          
==================== ===========""")
Exemple #15
0
 def test_nrml_to_gpkg(self):
     """Convert a mixed NRML source model to GeoPackage (needs fiona)."""
     try:
         import fiona
     except ImportError:
         raise unittest.SkipTest('fiona is missing')
     out_dir = tempfile.mkdtemp()
     with Print.patch() as printed:
         sap.runline(f'openquake.commands nrml_to gpkg {MIXED_SRC_MODEL} '
                     f'--outdir={out_dir} --chatty')
     captured = str(printed)
     # all three geometry kinds must be reported
     for geom in ('3D MultiPolygon', '3D MultiLineString', 'Point'):
         self.assertIn(geom, captured)
     shutil.rmtree(out_dir)
Exemple #16
0
 def test_reduce_sm_with_duplicate_source_ids(self):
     """
     Test reduce_sm when two sources share the same ID but have
     different codes.
     """
     work_dir = tempfile.mkdtemp()
     shutil.copytree(os.path.dirname(to_reduce.__file__),
                     os.path.join(work_dir, 'data'))
     job_ini = os.path.join(work_dir, 'data', 'job.ini')
     with Print.patch():
         calc = sap.runline(f'openquake.commands run {job_ini}')
     calc_id = calc.datastore.calc_id
     with mock.patch('logging.info') as info:
         sap.runline(f'openquake.commands reduce_sm {calc_id}')
     self.assertIn('Removed %d/%d sources', info.call_args[0][0])
     shutil.rmtree(work_dir)
Exemple #17
0
 def test_zip_ebr(self):
     """Zip a job with an exposure.csv among its inputs."""
     ini = os.path.join(os.path.dirname(case_eb.__file__), 'job_eb.ini')
     work_dir = tempfile.mkdtemp()
     archive = os.path.join(work_dir, 'x.zip')
     sap.runline(f'openquake.commands zip {ini} {archive}')
     expected = [
         'exposure.csv', 'exposure1.xml', 'gmpe_logic_tree.xml',
         'job_eb.ini', 'policy.csv', 'source_model.xml',
         'source_model_logic_tree.xml', 'vulnerability_model_nonstco.xml',
         'vulnerability_model_stco.xml']
     self.assertEqual(expected,
                      sorted(zipfile.ZipFile(archive).namelist()))
     shutil.rmtree(work_dir)
Exemple #18
0
 def test_zip(self):
     """Zip a job whose inputs include .hdf5 files."""
     ini = os.path.join(os.path.dirname(case_18.__file__), 'job.ini')
     work_dir = tempfile.mkdtemp()
     archive = os.path.join(work_dir, 'x.zip')
     sap.runline(f'openquake.commands zip {ini} {archive}')
     expected = [
         'Wcrust_high_rhypo.hdf5', 'Wcrust_low_rhypo.hdf5',
         'Wcrust_med_rhypo.hdf5', 'job.ini', 'nbc_asc_logic_tree.xml',
         'source_model_logic_tree.xml', 'vancouver_area_source.xml',
         'vancouver_school_sites.csv']
     self.assertEqual(expected,
                      sorted(zipfile.ZipFile(archive).namelist()))
     shutil.rmtree(work_dir)
Exemple #19
0
 def test_oqdata(self):
     """Test that the environment variable OQ_DATADIR is honored."""
     # test that the environment variable OQ_DATADIR is honored
     job_ini = os.path.join(os.path.dirname(case_2.__file__), 'job_2.ini')
     tempdir = tempfile.mkdtemp()
     dbserver.ensure_on()
     with mock.patch.dict(os.environ, OQ_DATADIR=tempdir):
         [(job_id, oq)] = run_jobs([job_ini], log_level='error')
         job = commonlib.logs.dbcmd('get_job', job_id)
         # the calculation datastore must live under the custom datadir
         self.assertTrue(job.ds_calc_dir.startswith(tempdir),
                         job.ds_calc_dir)
     with Print.patch() as p:
         sap.runline(f'openquake.commands export ruptures {job_id} -e csv'
                     f' --export-dir={tempdir}')
     self.assertIn('Exported', str(p))
     shutil.rmtree(tempdir)
Exemple #20
0
 def setUpClass(cls):
     """
     Run a small calculation and keep its datastore around so that the
     tests can inspect what is inside it.
     """
     job_ini = os.path.join(os.path.dirname(case_1.__file__), 'job.ini')
     with Print.patch() as cls.p:
         calculation = sap.runline(f'openquake.commands run {job_ini} -c 0')
     cls.calc_id = calculation.datastore.calc_id
Exemple #21
0
    def test_invalid(self):
        """The `tidy` command must report an invalid (negative) gmv."""
        fname = gettemp('''\
<?xml version="1.0" encoding="utf-8"?>
<nrml xmlns="http://openquake.org/xmlns/nrml/0.4"
      xmlns:gml="http://www.opengis.net/gml">
<gmfCollection gsimTreePath="" sourceModelTreePath="">
  <gmfSet stochasticEventSetId="1">
    <gmf IMT="PGA" ruptureId="0">
      <node gmv="0.012646" lon="12.12477995" lat="43.5812"/>
      <node gmv="-0.012492" lon="12.12478193" lat="43.5812"/>
    </gmf>
  </gmfSet>
</gmfCollection>
</nrml>''', suffix='.xml')
        with Print.patch() as p:
            sap.runline('openquake.commands tidy ' + fname)
        # "line 8" refers to the negative gmv node in the literal above
        self.assertIn('Could not convert gmv->positivefloat: '
                      'float -0.012492 < 0, line 8', str(p))
Exemple #22
0
    def test_ok(self):
        """The `tidy` command must reformat a valid NRML file in place."""
        fname = gettemp('''\
<?xml version="1.0" encoding="utf-8"?>
<nrml xmlns="http://openquake.org/xmlns/nrml/0.4"
      xmlns:gml="http://www.opengis.net/gml">
<gmfCollection gsimTreePath="" sourceModelTreePath="">
  <gmfSet stochasticEventSetId="1">
    <gmf IMT="PGA" ruptureId="0">
      <node gmv="0.0126515007046" lon="12.12477995" lat="43.5812"/>
      <node gmv="0.0124056290492" lon="12.12478193" lat="43.5812"/>
    </gmf>
  </gmfSet>
</gmfCollection>
</nrml>''',
                        suffix='.xml')
        with Print.patch() as p:
            sap.runline('openquake.commands tidy ' + fname)
        self.assertIn('Reformatted', str(p))
        # the file is rewritten with one attribute per line, sorted
        # attributes and floats truncated to scientific notation
        self.assertEqual(
            open(fname).read(), '''\
<?xml version="1.0" encoding="utf-8"?>
<nrml
xmlns="http://openquake.org/xmlns/nrml/0.4"
xmlns:gml="http://www.opengis.net/gml"
>
    <gmfCollection
    gsimTreePath=""
    sourceModelTreePath=""
    >
        <gmfSet
        stochasticEventSetId="1"
        >
            <gmf
            IMT="PGA"
            ruptureId="0"
            >
                <node gmv="1.26515E-02" lat="4.35812E+01" lon="1.21248E+01"/>
                <node gmv="1.24056E-02" lat="4.35812E+01" lon="1.21248E+01"/>
            </gmf>
        </gmfSet>
    </gmfCollection>
</nrml>
''')
Exemple #23
0
    def test(self):
        """Smoke-test `upgrade_nrml` on a directory with a NRML 0.4
        vulnerability file; it must run without errors."""
        tmpdir = tempfile.mkdtemp()
        path = os.path.join(tmpdir, 'vf.xml')
        with open(path, 'w') as f:
            f.write('''\
<?xml version="1.0"?>
<nrml xmlns="http://openquake.org/xmlns/nrml/0.4" xmlns:gml="http://www.opengis.net/gml">
    <vulnerabilityModel>
        <discreteVulnerabilitySet vulnerabilitySetID="vm1" assetCategory="buildings" lossCategory="Economic_loss">
            <IML IMT="PGA">0.01	0.040408163	0.070816327	0.10122449	0.131632653	0.162040816	0.19244898	0.222857143	0.253265306	0.283673469	0.314081633	0.344489796	0.374897959	0.405306122	0.435714286	0.466122449	0.496530612	0.526938776	0.557346939	0.587755102	0.618163265	0.648571429	0.678979592	0.709387755	0.739795918	0.770204082	0.800612245	0.831020408	0.861428571	0.891836735	0.922244898	0.952653061	0.983061224	1.013469388	1.043877551	1.074285714	1.104693878	1.135102041	1.165510204	1.195918367	1.226326531	1.256734694	1.287142857	1.31755102	1.347959184	1.378367347	1.40877551	1.439183673	1.469591837	1.5 </IML>
            <discreteVulnerability vulnerabilityFunctionID="A-SPSB-1" probabilisticDistribution="LN">
                   <lossRatio>0	0.000409057	0.018800826	0.196366309	0.709345589	0.991351187	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1	1</lossRatio>
                  <coefficientsVariation>0	0.000509057	0.022010677	0.186110985	0.241286071	0.010269671	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0	0 </coefficientsVariation>
            </discreteVulnerability>
        </discreteVulnerabilitySet>
    </vulnerabilityModel>
</nrml>''')
        sap.runline(f'openquake.commands upgrade_nrml {tmpdir}')
        shutil.rmtree(tmpdir)
Exemple #24
0
    def test_ebr(self):
        """Run an event based risk calculation with post processing and
        check its outputs and performance views."""
        # test a single case of `run_jobs`, but it is the most complex one,
        # event based risk with post processing
        job_ini = os.path.join(os.path.dirname(case_master.__file__),
                               'job.ini')
        with Print.patch() as p:
            [log] = run_jobs(create_jobs([job_ini], 'error'))
        self.assertIn('id | name', str(p))

        # check the exported outputs
        expected = set('''\
Aggregate Event Losses
Aggregate Loss Curves
Aggregate Loss Curves Statistics
Aggregate Losses
Aggregate Losses Statistics
Average Asset Losses
Average Asset Losses Statistics
Average Ground Motion Field
Earthquake Ruptures
Events
Full Report
Ground Motion Fields
Hazard Curves
Hazard Maps
Input Files
Realizations
Source Loss Table'''.splitlines())
        with Print.patch() as p:
            sap.runline(f'openquake.commands engine --lo {log.calc_id}')
        got = set(re.findall(r'\| ([\w ]+)', str(p))) - {'name'}
        # NOTE(review): a mismatch here is only reported on stderr, not
        # failed — presumably deliberate best-effort; confirm before
        # turning it into an assertion
        if got != expected:
            print('Missing output', expected - got, file=sys.stderr)
        # sanity check on the performance views: make sure that the most
        # relevant information is stored (it can be lost due to a wrong
        # refactoring of the monitoring and it happened several times)
        with read(log.calc_id) as dstore:
            perf = str(view('performance', dstore))
            self.assertIn('total event_based_risk', perf)
Exemple #25
0
    def test_show_calc(self):
        """Check `show contents`, `show sitecol` and `show slow_sources`."""
        calc_id = self.calc_id
        with Print.patch() as printed:
            sap.runline(f'openquake.commands show contents {calc_id}')
        self.assertIn('sitecol', str(printed))

        with Print.patch() as printed:
            sap.runline(f'openquake.commands show sitecol {calc_id}')
        self.assertIn('sids,lon,lat,depth,vs30,vs30measured', str(printed))

        with Print.patch() as printed:
            sap.runline(f'openquake.commands show slow_sources {calc_id}')
        self.assertIn('source_id code calc_time num_sites', str(printed))
Exemple #26
0
    def test_roundtrip_invalid(self):
        """
        Convert an invalid source model to shapefile and back; the strict
        (-v) conversion must fail with a meaningful error.
        """
        smc = os.path.join(os.path.dirname(__file__), "data",
                           "source_model_complete.xml")

        # the non-strict conversion works in both directions
        sap.runline(
            f'openquake.commands to_shapefile {smc} --output={self.out}')
        shpfiles = ' '.join(os.path.join(self.OUTDIR, name)
                            for name in os.listdir(self.OUTDIR))
        sap.runline(
            f'openquake.commands from_shapefile {shpfiles} -o{self.out}')

        # the strict conversion rejects the invalid file
        with self.assertRaises(Exception) as ctx:
            sap.runline(
                f'openquake.commands to_shapefile -v {smc} -o{self.out}')
        self.assertIn('Edges points are not in the right order',
                      str(ctx.exception))
Exemple #27
0
 def test_exports(self):
     """`info exports` must print a non-trivial amount of output."""
     with Print.patch() as printed:
         sap.runline('openquake.commands info exports')
     self.assertGreater(len(str(printed)), 10)
Exemple #28
0
 def test_imts(self):
     """`info imts` must print a non-trivial amount of output."""
     with Print.patch() as printed:
         sap.runline('openquake.commands info imts')
     self.assertGreaterEqual(len(str(printed)), 18)
Exemple #29
0
 def test_calculators(self):
     """`info calculators` must print a non-trivial amount of output."""
     with Print.patch() as printed:
         sap.runline('openquake.commands info calculators')
     self.assertGreater(len(str(printed)), 10)
Exemple #30
0
 def test_zip(self):
     """Run `info` on a zip archive and check 'hazard_imtls' is printed."""
     path = os.path.join(DATADIR, 'frenchbug.zip')
     with Print.patch() as printed:
         sap.runline(f'openquake.commands info {path}')
     self.assertIn('hazard_imtls', str(printed))