Example #1
    def test_case_master(self):
        if sys.platform == 'darwin':
            raise unittest.SkipTest('macOS')
        self.run_calc(case_master.__file__, 'job.ini', insured_losses='false')
        calc0 = self.calc.datastore  # event_based_risk
        self.run_calc(case_master.__file__, 'job.ini', insured_losses='false',
                      calculation_mode='event_based')
        calc1 = self.calc.datastore  # event_based
        self.run_calc(case_master.__file__, 'job.ini', insured_losses='false',
                      calculation_mode='gmf_ebrisk',
                      hazard_calculation_id=str(calc1.calc_id),
                      concurrent_tasks='0')  # to avoid numeric issues
        calc2 = self.calc.datastore  # gmf_ebrisk

        check_csm_info(calc0, calc2)  # the csm_info arrays must be equal

        # compare the average losses for an event_based_risk
        # case_master calculation with an equivalent gmf_ebrisk calculation
        f0 = writetmp(view('mean_avg_losses', calc0))
        self.assertEqualFiles('expected/avg_losses.txt', f0, delta=1E-5)
        f2 = writetmp(view('mean_avg_losses', calc2))
        self.assertEqualFiles('expected/avg_losses.txt', f2, delta=1E-4)

        # compare the event loss table generated by an event_based_risk
        # case_master calculation with an equivalent gmf_ebrisk calculation
        f0 = writetmp(view('elt', calc0))
        self.assertEqualFiles('expected/elt.txt', f0, delta=1E-5)
        f2 = writetmp(view('elt', calc2))
        self.assertEqualFiles('expected/elt.txt', f2, delta=1E-5)
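All of the examples in this listing revolve around writetmp, a helper that writes a string (or bytes payload) to a fresh temporary file and returns its path. The engine's own implementation is not reproduced here; the following is a minimal sketch, assuming only the behaviour visible in the calls above (optional content, dir, prefix and suffix arguments):

import os
import tempfile


def writetmp(content=None, dir=None, prefix='tmp', suffix='tmp'):
    """Write `content` to a new temporary file and return its path."""
    fh, path = tempfile.mkstemp(dir=dir, prefix=prefix, suffix=suffix)
    if content is not None:
        if isinstance(content, str):
            content = content.encode('utf-8')  # accept both str and bytes
        os.write(fh, content)
    os.close(fh)
    return path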
Example #2
    def test_case_master(self):
        self.assert_stats_ok(case_master, 'job.ini', individual_curves='false')

        fnames = export(('loss_maps-rlzs', 'csv'), self.calc.datastore)
        for fname in fnames:
            self.assertEqualFiles('expected/' + strip_calc_id(fname), fname)

        fname = writetmp(view('portfolio_loss', self.calc.datastore))
        self.assertEqualFiles('expected/portfolio_loss.txt', fname, delta=1E-5)

        # check rup_data is stored correctly
        fname = writetmp(view('ruptures_events', self.calc.datastore))
        self.assertEqualFiles('expected/ruptures_events.txt', fname)

        # export a specific eid
        fnames = export(('all_loss_ratios:0', 'csv'), self.calc.datastore)
        for fname in fnames:
            self.assertEqualFiles('expected/' + strip_calc_id(fname), fname)
        self.assertEqualFiles('expected/losses-eid=0.csv', fname)

        # export a specific pair (sm_id, eid)
        fnames = export(('all_loss_ratios:1:0', 'csv'),
                        self.calc.datastore)
        for fname in fnames:
            self.assertEqualFiles('expected/%s' % strip_calc_id(fname), fname)
Example #3
    def test_wrong_sites_csv(self):
        sites_csv = general.writetmp(
            'site_id,lon,lat\n1,1.0,2.1\n2,3.0,4.1\n3,5.0,6.1')
        source = general.writetmp("""
[general]
calculation_mode = scenario
[geometry]
sites_csv = %s
[misc]
maximum_distance=1
truncation_level=3
random_seed=5
[site_params]
reference_vs30_type = measured
reference_vs30_value = 600.0
reference_depth_to_2pt5km_per_sec = 5.0
reference_depth_to_1pt0km_per_sec = 100.0
intensity_measure_types_and_levels = {'PGA': [0.1, 0.2]}
investigation_time = 50.
export_dir = %s
""" % (os.path.basename(sites_csv), TMP))
        oq = readinput.get_oqparam(source)
        with self.assertRaises(InvalidFile) as ctx:
            readinput.get_mesh(oq)
        self.assertIn('expected site_id=0, got 1', str(ctx.exception))
        os.unlink(sites_csv)
Example #4
    def test_get_oqparam_with_files(self):
        temp_dir = tempfile.mkdtemp()
        source_model_input = general.writetmp(dir=temp_dir)
        site_model_input = general.writetmp(dir=temp_dir, content="foo")
        job_config = general.writetmp(dir=temp_dir,
                                      content="""
[general]
calculation_mode = event_based
[site]
sites = 0 0
source_model_file = %s
site_model_file = %s
maximum_distance=1
truncation_level=0
random_seed=0
intensity_measure_types = PGA
investigation_time = 50
export_dir = %s
        """ % (os.path.basename(source_model_input),
               os.path.basename(site_model_input), TMP))

        try:
            exp_base_path = os.path.dirname(job_config)

            expected_params = {
                'export_dir': TMP,
                'base_path': exp_base_path,
                'calculation_mode': 'event_based',
                'truncation_level': 0.0,
                'random_seed': 0,
                'maximum_distance': {
                    'default': 1
                },
                'inputs': {
                    'job_ini': job_config,
                    'site_model': site_model_input,
                    'source': [source_model_input],
                    'source_model': source_model_input
                },
                'sites': [(0.0, 0.0, 0.0)],
                'hazard_imtls': {
                    'PGA': None
                },
                'investigation_time': 50.0,
                'risk_investigation_time': 50.0,
            }

            params = getparams(readinput.get_oqparam(job_config))
            for key in expected_params:
                self.assertEqual(expected_params[key], params[key])
            items = sorted(params['inputs'].items())
            keys, values = zip(*items)
            self.assertEqual(
                ('job_ini', 'site_model', 'source', 'source_model'), keys)
            self.assertEqual(
                (job_config, site_model_input, [source_model_input
                                                ], source_model_input), values)

        finally:
            shutil.rmtree(temp_dir)
Example #5
    def test_get_oqparam_with_files(self):
        temp_dir = tempfile.mkdtemp()
        site_model_input = general.writetmp(dir=temp_dir, content="foo")
        job_config = general.writetmp(dir=temp_dir,
                                      content="""
[general]
calculation_mode = event_based
[foo]
bar = baz
[site]
sites = 0 0
site_model_file = %s
maximum_distance=1
truncation_level=0
random_seed=0
intensity_measure_types = PGA
investigation_time = 50
export_dir = %s
        """ % (site_model_input, TMP))

        try:
            exp_base_path = os.path.dirname(job_config)

            expected_params = {
                'export_dir': TMP,
                'base_path': exp_base_path,
                'calculation_mode': 'event_based',
                'truncation_level': 0.0,
                'random_seed': 0,
                'maximum_distance': {
                    'default': 1
                },
                'inputs': {
                    'job_ini': job_config,
                    'site_model': site_model_input
                },
                'sites': [(0.0, 0.0, 0.0)],
                'hazard_imtls': {
                    'PGA': None
                },
                'investigation_time': 50.0,
                'risk_investigation_time': 50.0,
            }

            with mock.patch('logging.warn') as warn:
                params = getparams(readinput.get_oqparam(job_config))
                for key in expected_params:
                    self.assertEqual(expected_params[key], params[key])
                items = sorted(params['inputs'].items())
                keys, values = zip(*items)
                self.assertEqual(('job_ini', 'site_model'), keys)
                self.assertEqual((job_config, site_model_input), values)

                # checking that warnings work
                self.assertEqual(warn.call_args[0][0],
                                 "The parameter 'bar' is unknown, ignoring")
        finally:
            shutil.rmtree(temp_dir)
Example #6
    def test_event_based_sampling(self):
        self.run_calc(ucerf.__file__, 'job_ebh.ini')

        # check the GMFs
        gmdata = self.calc.datastore['gmdata'].value
        got = writetmp(rst_table(gmdata, fmt='%.6f'))
        self.assertEqualFiles('expected/gmdata.csv', got)

        # check the mean hazard map
        got = writetmp(view('hmap', self.calc.datastore))
        self.assertEqualFiles('expected/hmap.rst', got)
Example #7
    def test_get_oqparam_with_sites_csv(self):
        sites_csv = general.writetmp('1.0,2.1\n3.0,4.1\n5.0,6.1')
        try:
            source = general.writetmp("""
[general]
calculation_mode = classical
[geometry]
sites_csv = %s
[misc]
maximum_distance=1
truncation_level=3
random_seed=5
[site_params]
reference_vs30_type = measured
reference_vs30_value = 600.0
reference_depth_to_2pt5km_per_sec = 5.0
reference_depth_to_1pt0km_per_sec = 100.0
intensity_measure_types_and_levels = {'PGA': [0.1, 0.2]}
investigation_time = 50.
export_dir = %s
            """ % (os.path.basename(sites_csv), TMP))
            exp_base_path = os.path.dirname(
                os.path.join(os.path.abspath('.'), source))

            expected_params = {
                'export_dir': TMP,
                'hazard_calculation_id': 1,
                'base_path': exp_base_path,
                'calculation_mode': 'classical',
                'truncation_level': 3.0,
                'random_seed': 5,
                'maximum_distance': {
                    'default': 1.0
                },
                'inputs': {
                    'job_ini': source,
                    'sites': sites_csv
                },
                'reference_depth_to_1pt0km_per_sec': 100.0,
                'reference_depth_to_2pt5km_per_sec': 5.0,
                'reference_vs30_type': 'measured',
                'reference_vs30_value': 600.0,
                'hazard_imtls': {
                    'PGA': [0.1, 0.2]
                },
                'investigation_time': 50.0,
                'risk_investigation_time': 50.0,
            }

            params = getparams(readinput.get_oqparam(source, hc_id=1))
            self.assertEqual(expected_params, params)
        finally:
            os.unlink(sites_csv)
Example #8
    def test_get_oqparam_with_files(self):
        temp_dir = tempfile.mkdtemp()
        site_model_input = general.writetmp(dir=temp_dir, content="foo")
        job_config = general.writetmp(dir=temp_dir, content="""
[general]
calculation_mode = event_based
[foo]
bar = baz
[site]
sites = 0 0
site_model_file = %s
maximum_distance=1
truncation_level=0
random_seed=0
intensity_measure_types = PGA
investigation_time = 50
export_dir = %s
        """ % (site_model_input, TMP))

        try:
            exp_base_path = os.path.dirname(job_config)

            expected_params = {
                'export_dir': TMP,
                'base_path': exp_base_path,
                'calculation_mode': 'event_based',
                'truncation_level': 0.0,
                'random_seed': 0,
                'maximum_distance': {'default': 1},
                'inputs': {'job_ini': job_config,
                           'site_model': site_model_input},
                'sites': [(0.0, 0.0)],
                'hazard_imtls': {'PGA': None},
                'investigation_time': 50.0,
                'risk_investigation_time': 50.0,
            }

            with mock.patch('logging.warn') as warn:
                params = getparams(readinput.get_oqparam(job_config))
                for key in expected_params:
                    self.assertEqual(expected_params[key], params[key])
                items = sorted(params['inputs'].items())
                keys, values = zip(*items)
                self.assertEqual(('job_ini', 'site_model'), keys)
                self.assertEqual((job_config, site_model_input), values)

                # checking that warnings work
                self.assertEqual(warn.call_args[0][0],
                                 "The parameter 'bar' is unknown, ignoring")
        finally:
            shutil.rmtree(temp_dir)
Example #9
def possibly_gunzip(fname):
    """
    A file can be .gzipped to save space (this happens
    in the debian package); in that case, let's gunzip it.

    :param fname: a file name (not zipped)
    """
    is_gz = os.path.exists(fname) and fname.endswith('.gz')
    there_is_gz = not os.path.exists(fname) and os.path.exists(fname + '.gz')
    if is_gz:
        return writetmp(gzip.open(fname).read())
    elif there_is_gz:
        return writetmp(gzip.open(fname + '.gz').read())
    return fname
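A short usage sketch for possibly_gunzip; the directory and file names below are purely illustrative, and writetmp is assumed to behave as in the sketch after Example #1:

import gzip
import os
import tempfile

tmpdir = tempfile.mkdtemp()
xml_path = os.path.join(tmpdir, 'source_model.xml')  # hypothetical path
with gzip.open(xml_path + '.gz', 'wb') as f:
    f.write(b'<nrml/>')

# source_model.xml does not exist, but source_model.xml.gz does, so the
# helper gunzips it into a temporary file and returns that temporary path
path = possibly_gunzip(xml_path)
assert open(path).read() == '<nrml/>'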
Example #10
def possibly_gunzip(fname):
    """
    A file can be .gzipped to save space (this happens
    in the debian package); in that case, let's gunzip it.

    :param fname: a file name (not zipped)
    """
    is_gz = os.path.exists(fname) and fname.endswith('.gz')
    there_is_gz = not os.path.exists(fname) and os.path.exists(fname + '.gz')
    if is_gz:
        return writetmp(gzip.open(fname).read())
    elif there_is_gz:
        return writetmp(gzip.open(fname + '.gz').read())
    return fname
Example #11
def _assert_equal_sources(nodes):
    if hasattr(nodes[0], 'source_id'):
        n0 = nodes[0]
        for n in nodes[1:]:
            n.assert_equal(n0, ignore=('id', 'src_group_id'))
    else:  # assume source nodes
        n0 = nodes[0].to_str()
        for n in nodes[1:]:
            eq = n.to_str() == n0
            if not eq:
                f0 = writetmp(n0)
                f1 = writetmp(n.to_str())
            assert eq, 'different parameters for source %s, run meld %s %s' % (
                n['id'], f0, f1)
    return nodes
Example #12
    def test_lr_eq_0_cov_gt_0(self):
        # If a vulnerability function loss ratio is 0 and its corresponding CoV
        # is > 0, a ValueError should be raised
        vuln_content = writetmp(u"""\
<?xml version='1.0' encoding='utf-8'?>
<nrml xmlns="http://openquake.org/xmlns/nrml/0.4"
      xmlns:gml="http://www.opengis.net/gml">
    <vulnerabilityModel>
        <discreteVulnerabilitySet vulnerabilitySetID="PAGER"
                                  assetCategory="population"
                                  lossCategory="occupants">
            <IML IMT="PGV">0.005 0.007 0.0098 0.0137</IML>
            <discreteVulnerability vulnerabilityFunctionID="A"
                                   probabilisticDistribution="LN">
                <lossRatio>0.00 0.06 0.18 0.36</lossRatio>
                <coefficientsVariation>0.30 0.30 0.30 0.30
                </coefficientsVariation>
            </discreteVulnerability>
        </discreteVulnerabilitySet>
    </vulnerabilityModel>
</nrml>
""")
        with self.assertRaises(ValueError) as ar:
            nrml.to_python(vuln_content)
        self.assertIn(
            'It is not valid to define a loss ratio = 0.0 with a '
            'corresponding coeff. of variation > 0.0', str(ar.exception))
Example #13
    def test_case_master(self):
        # this test exercises the case of a complex logic tree; it also
        # prints a warning when poe_agg is very different from the expected poe
        self.run_calc(case_master.__file__, 'job.ini')
        fname = writetmp(view('mean_disagg', self.calc.datastore))
        self.assertEqualFiles('expected/mean_disagg.rst', fname)
        os.remove(fname)
Example #14
    def test_missing_line(self):
        fname = general.writetmp('''\
col=00|ses=0001|src=test|rup=001-00,0 1,3.05128000E-01 6.04032000E-01
col=00|ses=0001|src=test|rup=001-01,0 1,2.67031000E-01 3.34878000E-01
''')
        with self.assertRaises(readinput.InvalidFile):
            readinput.get_gmfs_from_csv(self.oqparam, self.sitecol, fname)
Example #15
    def test(self):
        fname = general.writetmp('''\
<?xml version="1.0" encoding="utf-8"?>
<nrml xmlns:gml="http://www.opengis.net/gml"
      xmlns="http://openquake.org/xmlns/nrml/0.4">

    <!-- Spectral Acceleration (SA) example -->
    <hazardCurves sourceModelTreePath="b1_b2_b4" gsimTreePath="b1_b2" investigationTime="50.0" IMT="SA" saPeriod="0.025" saDamping="5.0">
        <IMLs>5.0000e-03 7.0000e-03 1.3700e-02</IMLs>

        <hazardCurve>
            <gml:Point>
                <gml:pos>-122.5000 37.5000</gml:pos>
            </gml:Point>
            <poEs>9.8728e-01 9.8266e-01 9.4957e-01</poEs>
        </hazardCurve>
        <hazardCurve>
            <gml:Point>
                <gml:pos>-123.5000 37.5000</gml:pos>
            </gml:Point>
            <poEs>9.8727e-02 9.8265e-02 9.4956e-02</poEs>
        </hazardCurve>
    </hazardCurves>

    <!-- Basic example, using PGA as IMT -->
    <hazardCurves sourceModelTreePath="b1_b2_b3" gsimTreePath="b1_b7" investigationTime="50.0" IMT="PGA">
        <IMLs>5.0000e-03 7.0000e-03 1.3700e-02 3.3700e-02</IMLs>

        <hazardCurve>
            <gml:Point>
                <gml:pos>-122.5000 37.5000</gml:pos>
            </gml:Point>
            <poEs>9.8728e-01 9.8226e-01 9.4947e-01 9.2947e-01</poEs>
        </hazardCurve>
        <hazardCurve>
            <gml:Point>
                <gml:pos>-123.5000 37.5000</gml:pos>
            </gml:Point>
            <poEs>9.8728e-02 9.8216e-02 9.4945e-02 9.2947e-02</poEs>
        </hazardCurve>
    </hazardCurves>
</nrml>
''',
                                 suffix='.xml')
        oqparam = object.__new__(oqvalidation.OqParam)
        oqparam.inputs = dict(hazard_curves=fname)
        sitecol, pmap = readinput.get_pmap(oqparam)
        self.assertEqual(len(sitecol), 2)
        self.assertEqual(sorted(oqparam.hazard_imtls.items()),
                         [('PGA', [0.005, 0.007, 0.0137, 0.0337]),
                          ('SA(0.025)', [0.005, 0.007, 0.0137])])
        hcurves = pmap.convert(oqparam.imtls, 2)
        assert_allclose(
            hcurves['PGA'],
            numpy.array([[0.098728, 0.098216, 0.094945, 0.092947],
                         [0.98728, 0.98226, 0.94947, 0.92947]]))
        assert_allclose(
            hcurves['SA(0.025)'],
            numpy.array([[0.098727, 0.098265, 0.094956],
                         [0.98728, 0.98266, 0.94957]]))
Example #16
    def test_lr_eq_0_cov_gt_0(self):
        # If a vulnerability function loss ratio is 0 and its corresponding CoV
        # is > 0, a ValueError should be raised
        vuln_content = writetmp(u"""\
<?xml version='1.0' encoding='utf-8'?>
<nrml xmlns="http://openquake.org/xmlns/nrml/0.4"
      xmlns:gml="http://www.opengis.net/gml">
    <vulnerabilityModel>
        <discreteVulnerabilitySet vulnerabilitySetID="PAGER"
                                  assetCategory="population"
                                  lossCategory="occupants">
            <IML IMT="PGV">0.005 0.007 0.0098 0.0137</IML>
            <discreteVulnerability vulnerabilityFunctionID="A"
                                   probabilisticDistribution="LN">
                <lossRatio>0.00 0.06 0.18 0.36</lossRatio>
                <coefficientsVariation>0.30 0.30 0.30 0.30
                </coefficientsVariation>
            </discreteVulnerability>
        </discreteVulnerabilitySet>
    </vulnerabilityModel>
</nrml>
""")
        with self.assertRaises(ValueError) as ar:
            nrml.parse(vuln_content)
        self.assertIn('It is not valid to define a loss ratio = 0.0 with a '
                      'corresponding coeff. of variation > 0.0',
                      str(ar.exception))
Example #17
    def test_get_oqparam_no_files(self):
        # sections are there just for documentation
        # when we parse the file, we ignore these
        source = general.writetmp("""
[general]
calculation_mode = classical_risk
region = 1 1, 2 2, 3 3
[foo]
bar = baz
intensity_measure_types = PGA
export_dir = %s
        """ % TMP)
        exp_base_path = os.path.dirname(source)

        expected_params = {
            'export_dir': TMP,
            'base_path': exp_base_path,
            'calculation_mode': 'classical_risk',
            'region': [(1.0, 1.0), (2.0, 2.0), (3.0, 3.0)],
            'inputs': {},
            'intensity_measure_types_and_levels': {'PGA': None},
        }
        # checking that warnings work
        with mock.patch('logging.warn') as warn:
            oqparam = readinput.get_oqparam(source)
        self.assertEqual(warn.call_args[0][0],
                         "The parameter 'bar' is unknown, ignoring")
        self.assertEqual(expected_params, vars(oqparam))
Example #18
    def test_missing_line(self):
        self.oqparam.inputs['gmfs'] = general.writetmp('''\
col=00|ses=0001|src=test|rup=001-00,0 1,3.05128000E-01 6.04032000E-01
col=00|ses=0001|src=test|rup=001-01,0 1,2.67031000E-01 3.34878000E-01
''')
        with self.assertRaises(readinput.InvalidFile):
            readinput.get_gmfs(self.oqparam, self.sitecol)
Example #19
    def __fromh5__(self, dic, attrs):
        # TODO: this is called more times than needed, maybe we should cache it
        sg_data = group_array(dic['sg_data'], 'sm_id')
        sm_data = dic['sm_data']
        vars(self).update(attrs)
        self.gsim_fname = decode(self.gsim_fname)
        if self.gsim_fname.endswith('.xml'):
            # otherwise it would look in the current directory
            GMPETable.GMPE_DIR = os.path.dirname(self.gsim_fname)
            trts = sorted(self.trts)
            tmp = writetmp(self.gsim_lt_xml, suffix='.xml')
            self.gsim_lt = logictree.GsimLogicTree(tmp, trts)
        else:  # fake file with the name of the GSIM
            self.gsim_lt = logictree.GsimLogicTree.from_(self.gsim_fname)
        self.source_models = []
        for sm_id, rec in enumerate(sm_data):
            tdata = sg_data[sm_id]
            srcgroups = [
                sourceconverter.SourceGroup(
                    self.trts[data['trti']], id=data['grp_id'],
                    eff_ruptures=data['effrup'], tot_ruptures=get_totrup(data))
                for data in tdata if data['effrup']]
            path = tuple(str(decode(rec['path'])).split('_'))
            trts = set(sg.trt for sg in srcgroups)
            sm = logictree.SourceModel(
                rec['name'], rec['weight'], path, srcgroups,
                rec['num_rlzs'], sm_id, rec['samples'])
            self.source_models.append(sm)
        self.init()
        try:
            os.remove(tmp)  # gsim_lt file
        except NameError:  # tmp is defined only in the regular case, see above
            pass
Example #20
    def test_event_based(self):
        self.run_calc(ucerf.__file__, 'job.ini')
        [fname] = export(('ruptures', 'csv'), self.calc.datastore)
        # check that we get the expected number of events
        with open(fname) as f:
            self.assertEqual(len(f.readlines()), 37)
        self.assertEqualFiles('expected/ruptures.csv', fname, lastline=20)

        # run a regular event_based calculation on top of the UCERF ruptures
        # and check the generated hazard maps
        self.run_calc(ucerf.__file__,
                      'job.ini',
                      calculation_mode='event_based',
                      hazard_calculation_id=str(self.calc.datastore.calc_id))

        # check the GMFs
        gmdata = self.calc.datastore['gmdata'].value
        got = writetmp(rst_table(gmdata, fmt='%.6f'))
        self.assertEqualFiles('expected/gmdata_eb.csv', got)

        # check the mean hazard map
        [fname] = [
            f for f in export(('hmaps', 'csv'), self.calc.datastore)
            if 'mean' in f
        ]
        self.assertEqualFiles('expected/hazard_map-mean.csv', fname)
Example #21
    def test_get_oqparam_with_sites_csv(self):
        sites_csv = general.writetmp('1.0,2.1\n3.0,4.1\n5.0,6.1')
        try:
            source = general.writetmp("""
[general]
calculation_mode = classical
[geometry]
sites_csv = %s
[misc]
maximum_distance=1
truncation_level=3
random_seed=5
[site_params]
reference_vs30_type = measured
reference_vs30_value = 600.0
reference_depth_to_2pt5km_per_sec = 5.0
reference_depth_to_1pt0km_per_sec = 100.0
intensity_measure_types_and_levels = {'PGA': [0.1, 0.2]}
investigation_time = 50.
export_dir = %s
            """ % (sites_csv, TMP))
            exp_base_path = os.path.dirname(
                os.path.join(os.path.abspath('.'), source))

            expected_params = {
                'export_dir': TMP,
                'base_path': exp_base_path,
                'calculation_mode': 'classical',
                'truncation_level': 3.0,
                'random_seed': 5,
                'maximum_distance': 1.0,
                'inputs': {'job_ini': source,
                           'sites': sites_csv},
                'reference_depth_to_1pt0km_per_sec': 100.0,
                'reference_depth_to_2pt5km_per_sec': 5.0,
                'reference_vs30_type': 'measured',
                'reference_vs30_value': 600.0,
                'hazard_imtls': {'PGA': [0.1, 0.2]},
                'risk_imtls': {},
                'investigation_time': 50.0,
                'risk_investigation_time': 50.0,
            }

            params = vars(readinput.get_oqparam(source))
            self.assertEqual(expected_params, params)
        finally:
            os.unlink(sites_csv)
Example #22
    def test_negative_indices(self):
        self.oqparam.inputs['gmfs'] = general.writetmp('''\
col=00|ses=0001|src=test|rup=001-00,0 -1,1.59434000E-01 3.92602000E-01
col=00|ses=0001|src=test|rup=001-01,0 1,3.05128000E-01 6.04032000E-01
col=00|ses=0001|src=test|rup=001-02,0 1,2.67031000E-01 3.34878000E-01
''')
        with self.assertRaises(readinput.InvalidFile):
            readinput.get_gmfs(self.oqparam, self.sitecol)
Example #23
    def test_negative_indices(self):
        fname = general.writetmp('''\
col=00|ses=0001|src=test|rup=001-00,0 -1,1.59434000E-01 3.92602000E-01
col=00|ses=0001|src=test|rup=001-01,0 1,3.05128000E-01 6.04032000E-01
col=00|ses=0001|src=test|rup=001-02,0 1,2.67031000E-01 3.34878000E-01
''')
        with self.assertRaises(readinput.InvalidFile):
            readinput.get_gmfs_from_csv(self.oqparam, self.sitecol, fname)
Example #24
    def test_not_ordered_tags(self):
        self.oqparam.inputs['gmfs'] = general.writetmp('''\
col=00|ses=0001|src=test|rup=001-02,0 1,1.59434000E-01 3.92602000E-01
col=00|ses=0001|src=test|rup=001-00,0 1,3.05128000E-01 6.04032000E-01
col=00|ses=0001|src=test|rup=001-01,0 1,2.67031000E-01 3.34878000E-01
''')
        with self.assertRaises(readinput.InvalidFile):
            readinput.get_gmfs(self.oqparam, self.sitecol)
Example #25
    def test_missing_bad_indices(self):
        self.oqparam.inputs['gmfs'] = general.writetmp('''\
col=00|ses=0001|src=test|rup=001-00,,1.59434000E-01 3.92602000E-01
col=00|ses=0001|src=test|rup=001-01,0 1,3.05128000E-01 6.04032000E-01
col=00|ses=0001|src=test|rup=001-02,X,2.67031000E-01
''')
        with self.assertRaises(readinput.InvalidFile):
            readinput.get_gmfs(self.oqparam, self.sitecol)
Example #26
    def test_missing_line(self):
        fname = general.writetmp('''\
0 0,0 1
col=00|ses=0001|src=test|rup=001-00,0 1,3.05128000E-01 6.04032000E-01
col=00|ses=0001|src=test|rup=001-01,0 1,2.67031000E-01 3.34878000E-01
''')
        with self.assertRaises(readinput.InvalidFile):
            readinput.get_gmfs_from_txt(self.oqparam, fname)
Example #27
class FakeParams(hdf5.LiteralAttrs):
    export_dir = '/tmp'
    hazard_calculation_id = None
    inputs = {'job_ini': writetmp('fake_job.ini')}
    concurrent_tasks = 0

    def to_params(self):
        return {}
Example #28
    def test_not_ordered_etags(self):
        fname = general.writetmp('''\
0 0,0 1
col=00|ses=0001|src=test|rup=001-02,0 1,1.59434000E-01 3.92602000E-01
col=00|ses=0001|src=test|rup=001-00,0 1,3.05128000E-01 6.04032000E-01
col=00|ses=0001|src=test|rup=001-01,0 1,2.67031000E-01 3.34878000E-01
''')
        with self.assertRaises(readinput.InvalidFile):
            readinput.get_gmfs_from_txt(self.oqparam, fname)
Example #29
def get_path(fname_or_fileobject):
    if isinstance(fname_or_fileobject, str):
        return fname_or_fileobject
    elif hasattr(fname_or_fileobject, 'getvalue'):
        return writetmp(fname_or_fileobject.getvalue())
    elif hasattr(fname_or_fileobject, 'name'):
        return fname_or_fileobject.name
    else:
        raise TypeError(fname_or_fileobject)
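A hedged usage sketch for get_path, exercising the three branches above; it assumes writetmp as sketched after Example #1 and uses only standard-library objects:

import io

assert get_path('/tmp/job.ini') == '/tmp/job.ini'   # plain string: unchanged

buf = io.StringIO('[general]\ncalculation_mode = scenario\n')
path = get_path(buf)            # has .getvalue(): dumped to a temporary file
assert open(path).read().startswith('[general]')

with open(path) as f:           # real file object: its .name is returned
    assert get_path(f) == path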
Example #30
    def test_ebr_2(self):
        self.run_calc(ebr_2.__file__, 'job_ebrisk.ini', exports='csv')
        fname = writetmp(view('mean_avg_losses', self.calc.datastore))
        self.assertEqualFiles('expected/avg_losses.txt', fname)
        alt = self.calc.datastore['agg_loss_table']
        self.assertEqual(len(alt), 20)
        self.assertEqual(set(alt['rlzi']), set([0]))  # single rlzi
        totloss = alt['loss'].sum()
        aae(totloss, 20210.27, decimal=2)
Example #31
    def test_case_4(self):
        # this test is sensitive to the ordering of the epsilons
        # in openquake.riskinput.make_eps
        out = self.run_calc(case_4.__file__, 'job.ini', exports='csv')
        fname = writetmp(view('totlosses', self.calc.datastore))
        self.assertEqualFiles('expected/totlosses.txt', fname)

        [fname] = out['agglosses-rlzs', 'csv']
        self.assertEqualFiles('expected/agglosses.csv', fname)
Example #32
    def test_event_based_risk(self):
        self.run_calc(ucerf.__file__, 'job_ebr.ini',
                      number_of_logic_tree_samples='2')

        fname = writetmp(view('portfolio_loss', self.calc.datastore))
        self.assertEqualFiles('expected/portfolio_loss.txt', fname)

        # make sure this runs
        view('fullreport', self.calc.datastore)
Example #33
    def get(cls, path, **data):
        resp = cls.c.get('/v1/calc/%s' % path, data, HTTP_HOST='127.0.0.1')
        assert resp.content
        try:
            return json.loads(resp.content.decode('utf8'))
        except ValueError:
            print('Invalid JSON, see %s' % writetmp(resp.content),
                  file=sys.stderr)
            return {}
Example #34
    def test_case_4(self):
        # this test is sensitive to the ordering of the epsilons
        # in openquake.riskinput.make_eps
        out = self.run_calc(case_4.__file__, 'job.ini', exports='csv')
        fname = writetmp(view('totlosses', self.calc.datastore))
        self.assertEqualFiles('expected/totlosses.txt', fname)

        [fname] = out['agglosses-rlzs', 'csv']
        self.assertEqualFiles('expected/agglosses.csv', fname, delta=1E-6)
Example #35
    def test_case_miriam(self):
        # this is a case with a grid and asset-hazard association
        out = self.run_calc(case_miriam.__file__, 'job.ini', exports='csv')
        [fname] = out['agg_loss_table', 'csv']
        self.assertEqualFiles('expected/agg_losses-rlz000-structural.csv',
                              fname)
        fname = writetmp(view('portfolio_loss', self.calc.datastore))
        self.assertEqualFiles(
            'expected/portfolio_loss.txt', fname, delta=1E-5)
Example #36
    def test_not_ordered_eids(self):
        fname = general.writetmp('''\
0 0,0 1
col=00|ses=0001|src=test|rup=001-02,0 1,1.59434000E-01 3.92602000E-01
col=00|ses=0001|src=test|rup=001-00,0 1,3.05128000E-01 6.04032000E-01
col=00|ses=0001|src=test|rup=001-01,0 1,2.67031000E-01 3.34878000E-01
''')
        with self.assertRaises(readinput.InvalidFile):
            readinput.get_gmfs_from_txt(self.oqparam, fname)
Example #37
    def test_case_master(self):
        if sys.platform == 'darwin':
            raise unittest.SkipTest('MacOSX')
        self.run_calc(case_master.__file__, 'job.ini', exports='csv')
        fnames = export(('avg_losses-stats', 'csv'), self.calc.datastore)
        assert fnames, 'avg_losses-stats not exported?'
        for fname in fnames:
            self.assertEqualFiles('expected/' + strip_calc_id(fname),
                                  fname,
                                  delta=1E-5)

        # extract loss_curves/rlz-1 (with the first asset having zero losses)
        [fname] = export(('loss_curves/rlz-1', 'csv'), self.calc.datastore)
        self.assertEqualFiles('expected/' + strip_calc_id(fname),
                              fname,
                              delta=1E-5)

        fnames = export(('loss_maps-rlzs', 'csv'), self.calc.datastore)
        assert fnames, 'loss_maps-rlzs not exported?'
        if REFERENCE_OS:
            for fname in fnames:
                self.assertEqualFiles('expected/' + strip_calc_id(fname),
                                      fname,
                                      delta=1E-5)

        # extract curves by tag
        tags = ['taxonomy=tax1', 'state=01', 'cresta=0.11']
        a = extract(self.calc.datastore, 'aggcurves/structural', *tags)
        self.assertEqual(a.array.shape, (4, 2))  # 4 stats, 2 return periods

        fname = writetmp(view('portfolio_loss', self.calc.datastore))
        self.assertEqualFiles('expected/portfolio_loss.txt', fname, delta=1E-5)
        os.remove(fname)

        # check ruptures are stored correctly
        fname = writetmp(view('ruptures_events', self.calc.datastore))
        self.assertEqualFiles('expected/ruptures_events.txt', fname)
        os.remove(fname)

        # check job_info is stored
        job_info = {decode(k) for k in dict(self.calc.datastore['job_info'])}
        self.assertIn('build_curves_maps.sent', job_info)
        self.assertIn('build_curves_maps.received', job_info)
        check_total_losses(self.calc)
Example #38
    def test_case_miriam(self):
        # this is a case with a grid and asset-hazard association
        out = self.run_calc(case_miriam.__file__, 'job.ini', exports='csv')
        [fname] = out['agg_loss_table', 'csv']
        self.assertEqualFiles('expected/agg_losses-rlz000-structural.csv',
                              fname, delta=1E-5)
        fname = writetmp(view('portfolio_loss', self.calc.datastore))
        self.assertEqualFiles(
            'expected/portfolio_loss.txt', fname, delta=1E-5)
        os.remove(fname)
Example #39
    def test_invalid_magnitude_distance_filter(self):
        source = general.writetmp("""
[general]
maximum_distance=[(200, 8)]
""")
        with self.assertRaises(ValueError) as ctx:
            readinput.get_oqparam(source)
        self.assertIn('magnitude 200.0 is bigger than the maximum (11): '
                      'could not convert to maximum_distance:',
                      str(ctx.exception))
Example #40
    def test_no_nrml(self):
        fname = writetmp('''\
<?xml version="1.0" encoding="UTF-8"?>
<fragilityModel id="Ethiopia" assetCategory="buildings"
lossCategory="structural" />
''')
        with self.assertRaises(ValueError) as ctx:
            read(fname)
        self.assertIn('expected a node of kind nrml, got fragilityModel',
                      str(ctx.exception))
Example #41
    def test_missing_indices_are_ok(self):
        self.oqparam.inputs['gmfs'] = general.writetmp('''\
col=00|ses=0001|src=test|rup=001-00,,1.59434000E-01 3.92602000E-01
col=00|ses=0001|src=test|rup=001-01,0 1,3.05128000E-01 6.04032000E-01
col=00|ses=0001|src=test|rup=001-02,0,2.67031000E-01
''')
        gmfs = readinput.get_gmfs(self.oqparam, self.sitecol)
        gmvs1, gmvs2 = gmfs['PGA']
        assert_allclose(gmvs1, [0.159434, 0.305128, 0.267031])
        assert_allclose(gmvs2, [0.392602, 0.604032, 0.])
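Aside from an optional leading line of site coordinates, the GMF fixtures used throughout these examples share a row layout of event tag, space-separated site indices and space-separated ground-motion values, with empty indices taken to mean "all sites". The sketch below is only an illustration of that layout under those assumptions, not the engine's actual reader:

def parse_gmf_row(line, num_sites):
    """Split one 'etag,indices,values' row into (etag, gmv per site)."""
    etag, indices, values = line.strip().split(',')
    gmvs = [0.0] * num_sites              # sites without a value get zero
    idxs = ([int(i) for i in indices.split()] if indices
            else range(num_sites))        # empty indices -> all sites
    for i, v in zip(idxs, values.split()):
        gmvs[i] = float(v)
    return etag, gmvs


row = 'col=00|ses=0001|src=test|rup=001-02,0,2.67031000E-01'
assert parse_gmf_row(row, 2) == ('col=00|ses=0001|src=test|rup=001-02',
                                 [0.267031, 0.0])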
Example #42
    def get(cls, path, **params):
        resp = requests.get("http://%s/v1/calc/%s" % (cls.hostport, path),
                            params=params)
        if not resp.text:
            sys.stderr.write(open(cls.errfname).read())
            return {}
        try:
            return json.loads(resp.text)
        except ValueError:
            print("Invalid JSON, see %s" % writetmp(resp.text),
                  file=sys.stderr)
            return {}
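Both this helper and its Django-test-client twin in Example #33 use writetmp as a debugging aid: when the server response is not valid JSON, the raw payload is dumped to a temporary file so it can be inspected later. A standalone, hedged sketch of that pattern, again assuming writetmp as in the sketch after Example #1:

import json
import sys


def load_json_or_dump(text):
    """Return the decoded JSON, or dump the raw text and return {}."""
    try:
        return json.loads(text)
    except ValueError:
        print('Invalid JSON, see %s' % writetmp(text), file=sys.stderr)
        return {}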
Example #43
    def test_gmf_ok(self):
        fname = general.writetmp('''\
0 0,0 1
col=00|ses=0001|src=test|rup=001-00,0 1,3.05128000E-01 6.04032000E-01
col=00|ses=0001|src=test|rup=001-01,0 1,2.67031000E-01 3.34878000E-01
col=00|ses=0001|src=test|rup=001-02,0 1,1.59434000E-01 3.92602000E-01
''')
        _, _, gmfs = readinput.get_gmfs_from_txt(self.oqparam, fname)
        gmvs1, gmvs2 = gmfs['PGA']
        assert_allclose(gmvs1, [0.305128, 0.267031, 0.159434])
        assert_allclose(gmvs2, [0.604032, 0.334878, 0.392602])
Example #44
    def test_case_5(self):
        # test with different curve resolution for different taxonomies
        self.run_calc(case_5.__file__, 'job_h.ini,job_r.ini')

        # check mean loss curves
        [fname] = export(('loss_curves-stats', 'xml'), self.calc.datastore)
        self.assertEqualFiles('expected/loss_curves-mean.xml', fname)

        # check individual avg losses
        fname = writetmp(view('loss_curves_avg', self.calc.datastore))
        self.assertEqualFiles('expected/loss_curves_avg.txt', fname)
Example #45
    def test_gmf_ok(self):
        fname = general.writetmp('''\
0 0,0 1
col=00|ses=0001|src=test|rup=001-00,0 1,3.05128000E-01 6.04032000E-01
col=00|ses=0001|src=test|rup=001-01,0 1,2.67031000E-01 3.34878000E-01
col=00|ses=0001|src=test|rup=001-02,0 1,1.59434000E-01 3.92602000E-01
''')
        _, _, gmfs = readinput.get_gmfs_from_txt(self.oqparam, fname)
        gmvs1, gmvs2 = gmfs['PGA']
        assert_allclose(gmvs1, [0.305128, 0.267031, 0.159434])
        assert_allclose(gmvs2, [0.604032, 0.334878, 0.392602])
Example #46
    def test_case_2(self):
        self.assert_stats_ok(case_2, 'job.ini', individual_curves='true')
        fname = writetmp(view('mean_avg_losses', self.calc.datastore))
        self.assertEqualFiles('expected/mean_avg_losses.txt', fname)

        # test the case when all GMFs are filtered out
        with self.assertRaises(RuntimeError) as ctx:
            self.run_calc(case_2.__file__, 'job.ini', minimum_intensity='10.0')
        self.assertEqual(
            str(ctx.exception),
            'No GMFs were generated, perhaps they were all below the '
            'minimum_intensity threshold')
Example #47
    def test_assetcol(self):
        expected = writetmp('''\
asset_ref:|S20:,site_id:uint32:,structural:float64:,deductible~structural:float64:,insurance_limit~structural:float64:
a0,0,3000,25,100
a1,1,2000,0.1,0.2
a2,2,1000,0.02,0.08
a3,2,5000,1000,3000
a4,3,500000,1000,3000
''')
        assetcol = riskinput.build_asset_collection(self.assets_by_site)
        numpy.testing.assert_equal(
            assetcol, readers.read_composite_array(expected))
Example #48
    def test_case_6a(self):
        # case with two gsims
        out = self.run_calc(case_6a.__file__, 'job_haz.ini,job_risk.ini',
                            exports='csv')
        f1, f2 = out['agglosses-rlzs', 'csv']
        self.assertEqualFiles('expected/agg-gsimltp_b1_structural.csv', f1)
        self.assertEqualFiles('expected/agg-gsimltp_b2_structural.csv', f2)

        # testing the totlosses view
        dstore = self.calc.datastore
        fname = writetmp(view('totlosses', dstore))
        self.assertEqualFiles('expected/totlosses.txt', fname)
Example #49
    def test_assetcol(self):
        expected = writetmp('''\
asset_ref:|S100:,site_id:uint32:,taxonomy:uint32:,fatalities:float64:,structural:float64:,deductible~structural:float64:,insurance_limit~structural:float64:
a0,0,1,10,3000,.25,1.0
a1,1,0,20,2000,0.25,0.5
a2,2,2,30,1000,0.2,0.8
a3,2,1,0,5000,2.0,6.0
a4,3,1,50,500000,2.0,6.0
''')
        assetcol = riskinput.build_asset_collection(self.assets_by_site)
        numpy.testing.assert_equal(
            assetcol, readers.read_composite_array(expected))
Example #50
    def test_assetcol(self):
        expected = writetmp('''\
idx:uint32,lon,lat,site_id:uint32,taxonomy:uint32:,number,area,occupants:float64:,structural:float64:,deductible-structural:float64:,insurance_limit-structural:float64:
0,8.12985001E+01,2.91098003E+01,0,1,3.00000000E+00,1.00000000E+01,1.00000000E+01,1.00000000E+02,2.50000000E+01,1.00000000E+02
1,8.30822983E+01,2.79006004E+01,1,0,5.00000000E+02,1.00000000E+01,2.00000000E+01,4.00000000E-01,1.00000000E-01,2.00000000E-01
''')
        assetcol = riskinput.AssetCollection(self.assets_by_site, None, None)
        numpy.testing.assert_equal(
            assetcol.array, writers.read_composite_array(expected))

        # pickleability
        pickle.loads(pickle.dumps(assetcol))
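The asset-collection fixtures in Examples #47, #49 and #50 encode the dtype of each column directly in the CSV header (name:dtype:, with untyped columns defaulting to floats). The reader used by those tests is not shown here; below is a minimal sketch under that assumption, not the actual read_composite_array implementation:

import numpy


def read_typed_csv(path, default='float64'):
    """Build a numpy structured array from a CSV with name:dtype: headers."""
    with open(path) as f:
        fields = []
        for col in f.readline().strip().split(','):
            name, _, dtype = col.partition(':')
            fields.append((name, dtype.strip(':') or default))
        dt = numpy.dtype(fields)
        rows = []
        for line in f:
            if not line.strip():
                continue
            vals = line.strip().split(',')
            rows.append(tuple(dt[i].type(v) for i, v in enumerate(vals)))
    return numpy.array(rows, dt)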
Example #51
    def test_event_based_risk(self):
        if h5py.__version__ < '2.6.0':
            raise unittest.SkipTest  # UCERF requires vlen arrays
        self.run_calc(ucerf.__file__, 'job_ebr.ini',
                      number_of_logic_tree_samples='2')

        fnames = export(('agg_loss_table', 'csv'), self.calc.datastore)
        for fname in fnames:
            self.assertEqualFiles('expected/%s' % strip_calc_id(fname), fname)

        fname = writetmp(view('portfolio_loss', self.calc.datastore))
        self.assertEqualFiles('expected/portfolio_loss.txt', fname)
Example #52
    def test(self):
        fname = general.writetmp('''\
<?xml version="1.0" encoding="utf-8"?>
<nrml xmlns:gml="http://www.opengis.net/gml"
      xmlns="http://openquake.org/xmlns/nrml/0.4">

    <!-- Spectral Acceleration (SA) example -->
    <hazardCurves sourceModelTreePath="b1_b2_b4" gsimTreePath="b1_b2" investigationTime="50.0" IMT="SA" saPeriod="0.025" saDamping="5.0">
        <IMLs>5.0000e-03 7.0000e-03 1.3700e-02</IMLs>

        <hazardCurve>
            <gml:Point>
                <gml:pos>-122.5000 37.5000</gml:pos>
            </gml:Point>
            <poEs>9.8728e-01 9.8266e-01 9.4957e-01</poEs>
        </hazardCurve>
        <hazardCurve>
            <gml:Point>
                <gml:pos>-123.5000 37.5000</gml:pos>
            </gml:Point>
            <poEs>9.8727e-02 9.8265e-02 9.4956e-02</poEs>
        </hazardCurve>
    </hazardCurves>

    <!-- Basic example, using PGA as IMT -->
    <hazardCurves sourceModelTreePath="b1_b2_b3" gsimTreePath="b1_b7" investigationTime="50.0" IMT="PGA">
        <IMLs>5.0000e-03 7.0000e-03 1.3700e-02 3.3700e-02</IMLs>

        <hazardCurve>
            <gml:Point>
                <gml:pos>-122.5000 37.5000</gml:pos>
            </gml:Point>
            <poEs>9.8728e-01 9.8226e-01 9.4947e-01 9.2947e-01</poEs>
        </hazardCurve>
        <hazardCurve>
            <gml:Point>
                <gml:pos>-123.5000 37.5000</gml:pos>
            </gml:Point>
            <poEs>9.8728e-02 9.8216e-02 9.4945e-02 9.2947e-02</poEs>
        </hazardCurve>
    </hazardCurves>
</nrml>
''', suffix='.xml')
        oqparam = mock.Mock()
        oqparam.inputs = dict(hazard_curves=fname)
        sitecol, hcurves = readinput.get_hcurves(oqparam)
        self.assertEqual(len(sitecol), 2)
        self.assertEqual(sorted(oqparam.hazard_imtls.items()),
                         [('PGA', [0.005, 0.007, 0.0137, 0.0337]),
                          ('SA(0.025)', [0.005, 0.007, 0.0137])])
        self.assertEqual(str(hcurves), '''\
[([0.098727, 0.098265, 0.094956], [0.098728, 0.098216, 0.094945, 0.092947])
 ([0.98728, 0.98266, 0.94957], [0.98728, 0.98226, 0.94947, 0.92947])]''')
Example #53
    def to_nrml(self, key, data, fname=None, fmt=FIVEDIGITS):
        """
        :param key:
            `dmg_dist_per_asset|dmg_dist_per_taxonomy|dmg_dist_total|collapse_map`
        :param data: sequence of rows to serialize
        :param fname: the path name of the output file; if None, build a name
        :returns: path name of the saved file
        """
        fname = fname or writetmp()
        node = getattr(self, key + '_node')(data)
        with open(fname, 'wb') as out:
            nrml.write([node], out, fmt)
        return fname
Example #54
def validate_nrml(request):
    """
    Leverage oq-risklib to check if a given XML text is a valid NRML

    :param request:
        a `django.http.HttpRequest` object containing the mandatory
        parameter 'xml_text': the text of the XML to be validated as NRML

    :returns: a JSON object, containing:
        * 'valid': a boolean indicating if the provided text is a valid NRML
        * 'error_msg': the error message, if any error was found
                       (None otherwise)
        * 'error_line': line of the given XML where the error was found
                        (None if no error was found or if it was not a
                        validation error)
    """
    xml_text = request.POST.get('xml_text')
    if not xml_text:
        return HttpResponseBadRequest(
            'Please provide the "xml_text" parameter')
    xml_file = writetmp(xml_text, suffix='.xml')
    try:
        nrml.read(xml_file)
    except etree.ParseError as exc:
        return _make_response(error_msg=exc.message.message,
                              error_line=exc.message.lineno,
                              valid=False)
    except Exception as exc:
        # get the exception message
        exc_msg = exc.args[0]
        if isinstance(exc_msg, bytes):
            exc_msg = exc_msg.decode('utf-8')   # make it a unicode object
        elif isinstance(exc_msg, unicode):
            pass
        else:
            # if it is another kind of object, it is not obvious a priori how
            # to extract the error line from it
            return _make_response(
                error_msg=unicode(exc_msg), error_line=None, valid=False)
        # if the line is not mentioned, the whole message is taken
        error_msg = exc_msg.split(', line')[0]
        # check if the exc_msg contains a line number indication
        search_match = re.search(r'line \d+', exc_msg)
        if search_match:
            error_line = int(search_match.group(0).split()[1])
        else:
            error_line = None
        return _make_response(
            error_msg=error_msg, error_line=error_line, valid=False)
    else:
        return _make_response(error_msg=None, error_line=None, valid=True)
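_make_response is not part of this excerpt. Below is a minimal sketch of what such a helper could look like, purely an assumption based on how it is called above and using Django's JsonResponse rather than the project's actual implementation:

from django.http import JsonResponse


def _make_response(error_msg, error_line, valid):
    # hypothetical helper: mirrors the keyword arguments used above
    return JsonResponse({'valid': valid,
                         'error_msg': error_msg,
                         'error_line': error_line})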
Example #55
    def test_get_oqparam_with_files(self):
        temp_dir = tempfile.mkdtemp()
        site_model_input = general.writetmp(dir=temp_dir, content="foo")
        job_config = general.writetmp(dir=temp_dir, content="""
[general]
calculation_mode = classical
[site]
sites = 0 0
site_model_file = %s
maximum_distance=1
truncation_level=0
random_seed=0
intensity_measure_types = PGA
export_dir = %s
        """ % (site_model_input, TMP))

        try:
            exp_base_path = os.path.dirname(job_config)

            expected_params = {
                'export_dir': TMP,
                'base_path': exp_base_path,
                'calculation_mode': 'classical',
                'truncation_level': 0.0,
                'random_seed': 0,
                'maximum_distance': 1.0,
                'inputs': {'site_model': site_model_input},
                'sites': [(0.0, 0.0)],
                'intensity_measure_types_and_levels': {'PGA': None},
            }

            params = vars(readinput.get_oqparam(job_config))
            self.assertEqual(expected_params, params)
            self.assertEqual(['site_model'], params['inputs'].keys())
            self.assertEqual([site_model_input], params['inputs'].values())
        finally:
            shutil.rmtree(temp_dir)
Example #56
    def test_case_master_ebr(self):
        out = self.run_calc(case_master.__file__, 'job.ini',
                            calculation_mode='ebrisk',
                            investigation_time='1',
                            insured_losses='false',
                            exports='csv')
        for fname in out['losses_by_taxon', 'csv']:
            self.assertEqualFiles('expected/%s' % strip_calc_id(fname), fname)

        for fname in out['agg_loss_table', 'csv']:
            self.assertEqualFiles('expected/%s' % strip_calc_id(fname), fname)

        fname = writetmp(view('portfolio_loss', self.calc.datastore))
        self.assertEqualFiles(
            'expected/portfolio_loss_ebr.txt', fname, delta=1E-5)
Example #57
    def test_get_rlzs_assoc(self):
        two_trts = general.writetmp("""\
<?xml version='1.0' encoding='utf-8'?>
<nrml xmlns:gml="http://www.opengis.net/gml"
      xmlns="http://openquake.org/xmlns/nrml/0.4">
    <logicTree logicTreeID='lt1'>
        <logicTreeBranchingLevel branchingLevelID="bl1">
            <logicTreeBranchSet uncertaintyType="gmpeModel" branchSetID="bs1"
                    applyToTectonicRegionType="Active Shallow Crust">

                <logicTreeBranch branchID="b1">
                    <uncertaintyModel>BooreAtkinson2008</uncertaintyModel>
                    <uncertaintyWeight>0.75</uncertaintyWeight>
                </logicTreeBranch>
                <logicTreeBranch branchID="b2">
                    <uncertaintyModel>ChiouYoungs2008</uncertaintyModel>
                    <uncertaintyWeight>0.25</uncertaintyWeight>
                </logicTreeBranch>

            </logicTreeBranchSet>
        </logicTreeBranchingLevel>

        <logicTreeBranchingLevel branchingLevelID="bl2">
            <logicTreeBranchSet uncertaintyType="gmpeModel" branchSetID="bs2"
                    applyToTectonicRegionType="Active Deep Crust">

                <logicTreeBranch branchID="b1">
                    <uncertaintyModel>BooreAtkinson2008</uncertaintyModel>
                    <uncertaintyWeight>0.75</uncertaintyWeight>
                </logicTreeBranch>

                <logicTreeBranch branchID="b2">
                    <uncertaintyModel>ChiouYoungs2008</uncertaintyModel>
                    <uncertaintyWeight>0.25</uncertaintyWeight>
                </logicTreeBranch>

            </logicTreeBranchSet>
        </logicTreeBranchingLevel>
    </logicTree>
</nrml>""")
        oqparam = mock.Mock()
        oqparam.base_path = '/'
        oqparam.inputs = dict(gsim_logic_tree=two_trts)
        with self.assertRaises(readinput.InvalidFile) as ctx:
            readinput.get_rlzs_assoc(oqparam)
        self.assertIn('must contain a single tectonic region type',
                      str(ctx.exception))
Example #58
    def test_risk_default_export_type(self):
        with mock.patch('openquake.engine.export.core.export') as export:
            with mock.patch('openquake.engine.db.models'
                            '.Output.objects.get') as output_get:
                output_get.return_value.output_type = 'test_output'
                output_get.return_value.oq_job.status = 'complete'
                export.return_value = writetmp('Fake result file content')

                request = self.factory.get('/v1/calc/result/37')
                response = views.get_result(request, 37)

                self.assertEqual(200, response.status_code)
                self.assertEqual('Fake result file content', response.content)

                self.assertEqual(1, export.call_count)
                self.assertEqual(37, export.call_args[0][0])
                self.assertEqual('xml', export.call_args[1]['export_type'])