def test_invalid_magnitude_distance_filter(self):
    source = general.gettemp("""
[general]
maximum_distance=[(200, 8)]
""")
    with self.assertRaises(ValueError) as ctx:
        readinput.get_oqparam(source)
    self.assertIn('magnitude 200.0 is bigger than the maximum (11): '
                  'could not convert to maximum_distance:',
                  str(ctx.exception))

def run2(job_haz, job_risk, concurrent_tasks, pdb, exports, monitor):
    """
    Run both hazard and risk, one after the other
    """
    hcalc = base.calculators(readinput.get_oqparam(job_haz), monitor)
    with monitor:
        hcalc.run(concurrent_tasks=concurrent_tasks, pdb=pdb, exports=exports)
        hc_id = hcalc.datastore.calc_id
        oq = readinput.get_oqparam(job_risk, hc_id=hc_id)
        rcalc = base.calculators(oq, monitor)
        rcalc.run(concurrent_tasks=concurrent_tasks, pdb=pdb, exports=exports,
                  hazard_calculation_id=hc_id)
    return rcalc

def run2(job_haz, job_risk, calc_id, concurrent_tasks, pdb, loglevel,
         exports, params):
    """
    Run both hazard and risk, one after the other
    """
    hcalc = base.calculators(readinput.get_oqparam(job_haz), calc_id)
    hcalc.run(concurrent_tasks=concurrent_tasks, pdb=pdb, exports=exports,
              **params)
    hc_id = hcalc.datastore.calc_id
    rcalc_id = logs.init(level=getattr(logging, loglevel.upper()))
    oq = readinput.get_oqparam(job_risk, hc_id=hc_id)
    rcalc = base.calculators(oq, rcalc_id)
    rcalc.run(pdb=pdb, exports=exports, **params)
    return rcalc

def test_wrong_sites_csv(self):
    sites_csv = general.gettemp(
        'site_id,lon,lat\n1,1.0,2.1\n2,3.0,4.1\n3,5.0,6.1')
    source = general.gettemp("""
[general]
calculation_mode = scenario
[geometry]
sites_csv = %s
[misc]
maximum_distance=1
truncation_level=3
random_seed=5
[site_params]
reference_vs30_type = measured
reference_vs30_value = 600.0
reference_depth_to_2pt5km_per_sec = 5.0
reference_depth_to_1pt0km_per_sec = 100.0
intensity_measure_types_and_levels = {'PGA': [0.1, 0.2]}
export_dir = %s
""" % (os.path.basename(sites_csv), TMP))
    oq = readinput.get_oqparam(source)
    with self.assertRaises(InvalidFile) as ctx:
        readinput.get_mesh(oq)
    self.assertIn('expected site_id=0, got 1', str(ctx.exception))
    os.unlink(sites_csv)

def checksum(thing):
    """
    Get the checksum of a calculation from the calculation ID (if already
    done) or from the job.ini/job.zip file (if not done yet). If `thing`
    is a source model logic tree file, get the checksum of the model by
    ignoring the job.ini, the gmpe logic tree file and possibly other files.
    """
    try:
        job_id = int(thing)
        job_file = None
    except ValueError:
        job_id = None
        job_file = thing
        if not os.path.exists(job_file):
            sys.exit('%s does not correspond to an existing file' % job_file)
    if job_id:
        dstore = util.read(job_id)
        checksum = dstore['/'].attrs['checksum32']
    elif job_file.endswith('.xml'):  # assume it is a smlt file
        inputs = {'source_model_logic_tree': job_file}
        checksum = readinput.get_checksum32(mock.Mock(inputs=inputs))
    else:
        oq = readinput.get_oqparam(job_file)
        checksum = readinput.get_checksum32(oq)
    print(checksum)

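# A minimal usage sketch for the checksum helper above; both arguments are
# hypothetical examples, not values from the original code. A string holding
# an integer selects an already finished calculation, a path selects a job
# file whose checksum is computed from its inputs.
checksum('42')       # checksum stored in the datastore of calculation 42
checksum('job.ini')  # checksum computed from the job file and its inputs
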
def build_report(job_ini, output_dir=None):
    """
    Write a `report.rst` file with information about the calculation
    without running it

    :param job_ini: full pathname of the job.ini file
    :param output_dir: the directory where the report is written
        (default the input directory)
    """
    calc_id = logs.init()
    oq = readinput.get_oqparam(job_ini)
    if oq.calculation_mode == 'classical':
        oq.calculation_mode = 'preclassical'
    oq.ground_motion_fields = False
    output_dir = output_dir or os.path.dirname(job_ini)
    from openquake.calculators import base  # ugly
    calc = base.calculators(oq, calc_id)
    calc.save_params()  # needed to save oqparam
    # some care is taken so that the real calculation is not run:
    # the goal is to extract information about the source management only
    calc.pre_execute()
    if oq.calculation_mode == 'preclassical':
        calc.execute()
    rw = ReportWriter(calc.datastore)
    rw.make_report()
    report = (os.path.join(output_dir, 'report.rst') if output_dir
              else calc.datastore.export_path('report.rst'))
    try:
        rw.save(report)
    except IOError as exc:  # permission error
        sys.stderr.write(str(exc) + '\n')
    readinput.exposure = None  # ugly hack
    return report

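# A sketch of how build_report could be called; 'job.ini' and '/tmp' are
# hypothetical paths. Per the code above, the report is written inside
# output_dir, which defaults to the directory containing the job file.
path = build_report('job.ini', output_dir='/tmp')
print('report written to', path)
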
def build_report(job_ini, output_dir=None):
    """
    Write a `report.rst` file with information about the calculation
    without running it

    :param job_ini: full pathname of the job.ini file
    :param output_dir: the directory where the report is written
        (default the input directory)
    """
    oq = readinput.get_oqparam(job_ini)
    output_dir = output_dir or os.path.dirname(job_ini)
    from openquake.calculators import base  # ugly
    calc = base.calculators(oq)
    calc.save_params()  # needed to save oqparam
    # some care is taken so that the real calculation is not run:
    # the goal is to extract information about the source management only
    with mock.patch.object(PSHACalculator, 'core_task', count_eff_ruptures):
        calc.pre_execute()
        if hasattr(calc, '_composite_source_model'):
            calc.datastore['csm_info'] = calc.csm.info
    rw = ReportWriter(calc.datastore)
    rw.make_report()
    report = (os.path.join(output_dir, 'report.rst') if output_dir
              else calc.datastore.export_path('report.rst'))
    try:
        rw.save(report)
    except IOError as exc:  # permission error
        sys.stderr.write(str(exc) + '\n')
    return report

def test_get_oqparam_no_files(self):
    # sections are there just for documentation
    # when we parse the file, we ignore these
    source = general.writetmp("""
[general]
calculation_mode = classical_risk
region = 1 1, 2 2, 3 3
[foo]
bar = baz
intensity_measure_types = PGA
export_dir = %s
""" % TMP)
    exp_base_path = os.path.dirname(source)
    expected_params = {
        'export_dir': TMP,
        'base_path': exp_base_path,
        'calculation_mode': 'classical_risk',
        'region': [(1.0, 1.0), (2.0, 2.0), (3.0, 3.0)],
        'inputs': {},
        'intensity_measure_types_and_levels': {'PGA': None},
    }
    # checking that warnings work
    with mock.patch('logging.warn') as warn:
        oqparam = readinput.get_oqparam(source)
    self.assertEqual(warn.call_args[0][0],
                     "The parameter 'bar' is unknown, ignoring")
    self.assertEqual(expected_params, vars(oqparam))

def run(job_ini, concurrent_tasks=None, loglevel='info', hc=None, exports=''):
    """
    Run a calculation. Optionally, set the number of concurrent_tasks
    (0 to disable the parallelization).
    """
    logging.basicConfig(level=getattr(logging, loglevel.upper()))
    job_inis = job_ini.split(',')
    assert len(job_inis) in (1, 2), job_inis
    monitor = performance.Monitor('total', measuremem=True)
    if len(job_inis) == 1:  # run hazard or risk
        oqparam = readinput.get_oqparam(job_inis[0], hc_id=hc)
        if hc and hc < 0:  # interpret negative calculation ids
            calc_ids = datastore.get_calc_ids()
            try:
                hc = calc_ids[hc]
            except IndexError:
                raise SystemExit('There are %d old calculations, cannot '
                                 'retrieve the %s' % (len(calc_ids), hc))
        calc = base.calculators(oqparam, monitor)
        monitor.monitor_dir = calc.datastore.calc_dir
        with monitor:
            calc.run(concurrent_tasks=concurrent_tasks, exports=exports,
                     hazard_calculation_id=hc)
    else:  # run hazard + risk
        calc = run2(
            job_inis[0], job_inis[1], concurrent_tasks, exports, monitor)
    logging.info('Total time spent: %s s', monitor.duration)
    logging.info('Memory allocated: %s', general.humansize(monitor.mem))
    monitor.flush()
    print('See the output with hdfview %s/output.hdf5'
          % calc.datastore.calc_dir)
    return calc

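# A usage sketch for the `run` command above; the file names and the export
# format are hypothetical. Passing two comma-separated job files triggers the
# hazard + risk branch handled by run2().
calc = run('job_hazard.ini,job_risk.ini', concurrent_tasks=4, exports='csv')
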
def setUpClass(cls):
    cls.oqparam = readinput.get_oqparam('job_loss.ini', pkg=case_2)
    cls.oqparam.insured_losses = True
    cls.sitecol, cls.assets_by_site = readinput.get_sitecol_assets(
        cls.oqparam, readinput.get_exposure(cls.oqparam))
    rmdict = riskmodels.get_risk_models(cls.oqparam)
    cls.riskmodel = readinput.get_risk_model(cls.oqparam, rmdict)

def build_report(job_ini, output_dir=None):
    """
    Write a `report.rst` file with information about the calculation
    without running it

    :param job_ini: full pathname of the job.ini file
    :param output_dir: the directory where the report is written
        (default the input directory)
    """
    oq = readinput.get_oqparam(job_ini)
    output_dir = output_dir or os.path.dirname(job_ini)
    from openquake.calculators import base  # ugly
    calc = base.calculators(oq)
    calc.save_params()  # needed to save oqparam
    # some care is taken so that the real calculation is not run:
    # the goal is to extract information about the source management only
    p = mock.patch.object
    with p(PSHACalculator, 'core_task', count_ruptures):
        if calc.pre_calculator == 'event_based_risk':
            # compute the ruptures only, not the risk
            calc.pre_calculator = 'event_based_rupture'
        calc.pre_execute()
        if hasattr(calc, 'csm'):
            calc.datastore['csm_info'] = calc.csm.info
    rw = ReportWriter(calc.datastore)
    rw.make_report()
    report = (os.path.join(output_dir, 'report.rst') if output_dir
              else calc.datastore.export_path('report.rst'))
    try:
        rw.save(report)
    except IOError as exc:  # permission error
        sys.stderr.write(str(exc) + '\n')
    return report

def test_grid_site_model_exposure(self):
    oq = readinput.get_oqparam('job.ini', case_16)
    oq.region_grid_spacing = 15
    sitecol, assetcol, discarded = readinput.get_sitecol_assetcol(oq)
    self.assertEqual(len(sitecol), 141)  # 10 sites were discarded silently
    self.assertEqual(len(assetcol), 151)
    self.assertEqual(len(discarded), 0)  # no assets were discarded

def test_wrong_trts(self):
    # 'active Shallow Crust' is missing, 'Active Shallow Crust' is there
    oq = readinput.get_oqparam('job.ini', case_16)
    with self.assertRaises(logictree.InvalidLogicTree) as c:
        readinput.get_gsim_lt(oq, ['active Shallow Crust'])
    self.assertIn("is missing the TRT 'active Shallow Crust'",
                  str(c.exception))

def build_report(job_ini, output_dir=None):
    """
    Write a `report.rst` file with information about the calculation
    without running it

    :param job_ini: full pathname of the job.ini file
    :param output_dir: the directory where the report is written
        (default the input directory)
    """
    oq = readinput.get_oqparam(job_ini)
    output_dir = output_dir or os.path.dirname(job_ini)
    calc = base.calculators(oq)
    # some care is taken so that the real calculation is not run:
    # the goal is to extract information about the source management only
    with mock.patch.object(
            calc.__class__, 'core_task', source.count_eff_ruptures):
        calc.pre_execute()
        with mock.patch.object(logging.root, 'info'):  # reduce logging
            calc.execute()
    calc.save_params()
    rw = ReportWriter(calc.datastore)
    rw.make_report()
    report = (os.path.join(output_dir, 'report.rst') if output_dir
              else calc.datastore.export_path('report.rst'))
    try:
        rw.save(report)
    except IOError as exc:  # permission error
        sys.stderr.write(str(exc) + '\n')
    return report

def job_from_file_lite(cfg_file, username, log_level='info', exports='',
                       **extras):
    """
    Create a full job profile from a job config file.

    :param str cfg_file:
        Path to the job.ini files.
    :param str username:
        The user who will own this job profile and all results.
    :param str log_level:
        Desired log level.
    :param exports:
        Comma-separated string of desired export types.
    :param extras:
        Extra parameters (used only in the tests to override the params)
    :returns:
        :class:`openquake.engine.db.models.OqJob` object
    :raises:
        `RuntimeError` if the input job configuration is not valid
    """
    from openquake.commonlib.calculators import base
    # create the current job
    job = create_job(user_name=username, log_level=log_level)
    models.JobStats.objects.create(oq_job=job)
    with logs.handle(job, log_level):
        # read calculation params and create the calculation profile
        params = readinput.get_params([cfg_file])
        params.update(extras)
        # build and validate an OqParam object
        oqparam = readinput.get_oqparam(params, calculators=base.calculators)
        job.save_params(vars(oqparam))
        job.save()
    return job

def test_wrong_sites_csv(self):
    sites_csv = general.gettemp(
        'site_id,lon,lat\n1,1.0,2.1\n2,3.0,4.1\n3,5.0,6.1')
    source = general.gettemp("""
[general]
calculation_mode = scenario
[geometry]
sites_csv = %s
[misc]
maximum_distance=1
truncation_level=3
random_seed=5
[site_params]
reference_vs30_type = measured
reference_vs30_value = 600.0
reference_depth_to_2pt5km_per_sec = 5.0
reference_depth_to_1pt0km_per_sec = 100.0
intensity_measure_types_and_levels = {'PGA': [0.1, 0.2]}
investigation_time = 50.
export_dir = %s
""" % (os.path.basename(sites_csv), TMP))
    oq = readinput.get_oqparam(source)
    with self.assertRaises(InvalidFile) as ctx:
        readinput.get_mesh(oq)
    self.assertIn('expected site_id=0, got 1', str(ctx.exception))
    os.unlink(sites_csv)

def build_report(job_ini, output_dir=None):
    """
    Write a `report.rst` file with information about the calculation.

    :param job_ini: full pathname of the job.ini file
    :param output_dir: the directory where the report is written
        (default the input directory)
    """
    oq = readinput.get_oqparam(job_ini)
    output_dir = output_dir or os.path.dirname(job_ini)
    calc = base.calculators(oq)
    calc.pre_execute()
    ds = datastore.DataStore(calc.datastore.calc_id)
    rw = ReportWriter(ds)
    report = os.path.join(output_dir, 'report.rst')
    for name in ('params', 'inputs'):
        rw.add(name)
    if 'scenario' not in oq.calculation_mode:
        rw.add('csm_info')
        rw.add('rlzs_assoc', calc.rlzs_assoc)
    if 'num_ruptures' in ds:
        rw.add('rupture_collections')
        rw.add('col_rlz_assocs')
    if oq.calculation_mode in ('classical', 'event_based', 'ebr'):
        rw.add('data_transfer')
    rw.save(report)
    return report

def test_exposure_only(self):
    oq = readinput.get_oqparam('job.ini', case_16)
    del oq.inputs['site_model']
    sitecol, assetcol, discarded = readinput.get_sitecol_assetcol(oq)
    self.assertEqual(len(sitecol), 148)
    self.assertEqual(len(assetcol), 151)
    self.assertEqual(len(discarded), 0)

    # test agg_value
    arr = assetcol.agg_value()
    assert_allclose(arr, [3.6306637e+09])
    arr = assetcol.agg_value('taxonomy')
    assert_allclose(
        arr, [[4.9882240e+06], [1.1328099e+08], [4.2222912e+08],
              [1.6412870e+07], [5.0686808e+07], [2.5343402e+07],
              [1.5254313e+09], [6.6375590e+06], [8.3206810e+08],
              [1.6412871e+07], [3.9439158e+08], [1.6734690e+07],
              [6.7582400e+06], [1.3613027e+08], [4.3124016e+07],
              [9.4132640e+06], [1.0620092e+07]])
    arr = assetcol.agg_value('occupancy')
    assert_allclose(assetcol.agg_value('occupancy'), [[3.6306644e+09]])
    arr = assetcol.agg_value('taxonomy', 'occupancy')
    self.assertEqual(arr.shape, (17, 1, 1))

def main(thing):
    """
    Get the checksum of a calculation from the calculation ID (if already
    done) or from the job.ini/job.zip file (if not done yet). If `thing`
    is a source model logic tree file, get the checksum of the model by
    ignoring the job.ini, the gmpe logic tree file and possibly other files.
    """
    try:
        job_id = int(thing)
        job_file = None
    except ValueError:
        job_id = None
        job_file = thing
        if not os.path.exists(job_file):
            sys.exit('%s does not correspond to an existing file' % job_file)
    if job_id:
        dstore = util.read(job_id)
        checksum = dstore['/'].attrs['checksum32']
    elif job_file.endswith('.xml'):  # assume it is a smlt file
        inputs = {'source_model_logic_tree': job_file}
        checksum = readinput.get_checksum32(
            mock.Mock(inputs=inputs, random_seed=42))
    else:
        oq = readinput.get_oqparam(job_file)
        checksum = readinput.get_checksum32(oq)
    print(checksum)

def test_grid_site_model_exposure(self):
    oq = readinput.get_oqparam(
        'job.ini', case_16, region_grid_spacing='15')
    sitecol, assetcol, discarded = readinput.get_sitecol_assetcol(oq)
    self.assertEqual(len(sitecol), 148)  # 3 sites were discarded silently
    self.assertEqual(len(assetcol), 151)
    self.assertEqual(len(discarded), 0)  # no assets were discarded

def test_applyToSources(self):
    oq = readinput.get_oqparam('job.ini', case_21)
    with mock.patch('logging.info') as info:
        readinput.get_composite_source_model(oq)
    self.assertEqual(
        info.call_args[0],
        ('Applied %d changes to the composite source model', 81))

def test_wrong_trts(self):
    # invalid TRT in job.ini [reqv]
    oq = readinput.get_oqparam('job.ini', case_2)
    fname = oq.inputs['reqv'].pop('active shallow crust')
    oq.inputs['reqv']['act shallow crust'] = fname
    with self.assertRaises(ValueError) as ctx:
        readinput.get_composite_source_model(oq, in_memory=False)
    self.assertIn('Unknown TRT=act shallow crust', str(ctx.exception))

def run2(job_haz, job_risk, concurrent_tasks, pdb, exports, monitor):
    """
    Run both hazard and risk, one after the other
    """
    hcalc = base.calculators(readinput.get_oqparam(job_haz), monitor)
    with monitor:
        monitor.monitor_dir = hcalc.datastore.calc_dir
        hcalc.run(concurrent_tasks=concurrent_tasks, pdb=pdb, exports=exports)
        hc_id = hcalc.datastore.calc_id
        oq = readinput.get_oqparam(job_risk, hc_id=hc_id)
        rcalc = base.calculators(oq, monitor)
        monitor.monitor_dir = rcalc.datastore.calc_dir
        rcalc.run(concurrent_tasks=concurrent_tasks, pdb=pdb, exports=exports,
                  hazard_calculation_id=hc_id)
    return rcalc

def test_extra_large_source(self):
    oq = readinput.get_oqparam('job.ini', case_21)
    with mock.patch('logging.error') as error, datastore.hdf5new() as h5:
        with mock.patch('openquake.hazardlib.geo.utils.MAX_EXTENT', 80):
            readinput.get_composite_source_model(oq, h5)
            os.remove(h5.filename)
    self.assertEqual(
        error.call_args[0][0], 'source SFLT2: too large: 84 km')

def test_applyToSources(self):
    oq = readinput.get_oqparam('job.ini', case_21)
    with mock.patch('logging.info') as info:
        with mock.patch.dict(os.environ, OQ_DISTRIBUTE='no'):
            readinput.get_composite_source_model(oq)
    self.assertEqual(
        info.call_args[0],
        ('Applied %d changes to the composite source model', 81))

def get_calc(job_ini, calc_id):
    """
    Factory function returning a Calculator instance

    :param job_ini: path to job.ini file
    :param calc_id: calculation ID
    """
    return calculators(readinput.get_oqparam(job_ini), calc_id)

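# A minimal sketch of how the factory above could be used; 'job.ini' is a
# hypothetical path, the calculation ID is assumed to come from logs.init()
# as in build_report above, and run() is assumed to accept no arguments
# and fall back to its defaults.
calc_id = logs.init()
calc = get_calc('job.ini', calc_id)
calc.run()
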
def test_applyToSources(self):
    oq = readinput.get_oqparam('job.ini', case_21)
    oq.prefilter_sources = 'no'
    with mock.patch('logging.info') as info:
        readinput.get_composite_source_model(oq)
    self.assertEqual(
        info.call_args[0],
        ('Applied %d changes to the composite source model', 81))

def run2(job_haz, job_risk, calc_id, concurrent_tasks, pdb, loglevel,
         exports, params):
    """
    Run both hazard and risk, one after the other
    """
    hcalc = base.calculators(readinput.get_oqparam(job_haz), calc_id)
    hcalc.run(concurrent_tasks=concurrent_tasks, pdb=pdb, exports=exports,
              **params)
    hcalc.datastore.close()
    hc_id = hcalc.datastore.calc_id
    rcalc_id = logs.init(level=getattr(logging, loglevel.upper()))
    oq = readinput.get_oqparam(job_risk, hc_id=hc_id)
    rcalc = base.calculators(oq, rcalc_id)
    rcalc.run(pdb=pdb, exports=exports, **params)
    return rcalc

def test_wrong_trts_in_reqv(self):
    # invalid TRT in job.ini [reqv]
    oq = readinput.get_oqparam('job.ini', case_2)
    fname = oq.inputs['reqv'].pop('active shallow crust')
    oq.inputs['reqv']['act shallow crust'] = fname
    with self.assertRaises(ValueError) as ctx:
        readinput.get_composite_source_model(oq)
    self.assertIn('Unknown TRT=act shallow crust', str(ctx.exception))

def test_site_amplification(self):
    oq = readinput.get_oqparam('job.ini', case_16)
    oq.inputs['amplification'] = os.path.join(
        oq.base_path, 'invalid_amplification.csv')
    df = readinput.get_amplification(oq)
    with self.assertRaises(ValueError) as ctx:
        site_amplification.Amplifier(oq.imtls, df)
    self.assertIn("Found duplicates for (b'F', 0.2)", str(ctx.exception))

def test_get_oqparam_with_files(self):
    temp_dir = tempfile.mkdtemp()
    site_model_input = general.writetmp(dir=temp_dir, content="foo")
    job_config = general.writetmp(dir=temp_dir, content="""
[general]
calculation_mode = event_based
[foo]
bar = baz
[site]
sites = 0 0
site_model_file = %s
maximum_distance=1
truncation_level=0
random_seed=0
intensity_measure_types = PGA
investigation_time = 50
export_dir = %s
""" % (site_model_input, TMP))
    try:
        exp_base_path = os.path.dirname(job_config)
        expected_params = {
            'export_dir': TMP,
            'base_path': exp_base_path,
            'calculation_mode': 'event_based',
            'truncation_level': 0.0,
            'random_seed': 0,
            'maximum_distance': {'default': 1},
            'inputs': {'job_ini': job_config,
                       'site_model': site_model_input},
            'sites': [(0.0, 0.0, 0.0)],
            'hazard_imtls': {'PGA': None},
            'investigation_time': 50.0,
            'risk_investigation_time': 50.0,
        }
        with mock.patch('logging.warn') as warn:
            params = getparams(readinput.get_oqparam(job_config))
            for key in expected_params:
                self.assertEqual(expected_params[key], params[key])
            items = sorted(params['inputs'].items())
            keys, values = zip(*items)
            self.assertEqual(('job_ini', 'site_model'), keys)
            self.assertEqual((job_config, site_model_input), values)
            # checking that warnings work
            self.assertEqual(warn.call_args[0][0],
                             "The parameter 'bar' is unknown, ignoring")
    finally:
        shutil.rmtree(temp_dir)

def check_input(job_ini_or_zip_or_nrml):
    if job_ini_or_zip_or_nrml.endswith('.xml'):
        try:
            print(nrml.to_python(job_ini_or_zip_or_nrml))
        except Exception as exc:
            sys.exit(exc)
    else:
        calc = base.calculators(
            readinput.get_oqparam(job_ini_or_zip_or_nrml))
        calc.read_inputs()

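# A usage sketch for check_input above; both file names are hypothetical.
# An .xml argument is parsed and printed as NRML, anything else is treated
# as a job file whose inputs are read without running the calculation.
check_input('source_model.xml')  # prints the parsed NRML node
check_input('job.ini')           # reads all the inputs, runs nothing
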
def run2(job_haz, job_risk, calc_id, concurrent_tasks, pdb, reuse_input,
         loglevel, exports, params):
    """
    Run both hazard and risk, one after the other
    """
    oq = readinput.get_oqparam(job_haz, kw=params)
    hcalc = base.calculators(oq, calc_id)
    hcalc.run(concurrent_tasks=concurrent_tasks, pdb=pdb, exports=exports)
    hcalc.datastore.close()
    hc_id = hcalc.datastore.calc_id
    rcalc_id = logs.init(level=getattr(logging, loglevel.upper()))
    params['hazard_calculation_id'] = str(hc_id)
    oq = readinput.get_oqparam(job_risk, kw=params)
    rcalc = base.calculators(oq, rcalc_id)
    if reuse_input:  # enable caching
        oq.cachedir = datastore.get_datadir()
    rcalc.run(pdb=pdb, exports=exports)
    return rcalc

def test_extra_large_source(self):
    oq = readinput.get_oqparam('job.ini', case_21)
    mon = performance.Monitor('csm', datastore.hdf5new())
    with mock.patch('logging.error') as error:
        with mock.patch('openquake.hazardlib.geo.utils.MAX_EXTENT', 80):
            readinput.get_composite_source_model(oq, mon)
    mon.hdf5.close()
    os.remove(mon.hdf5.path)
    self.assertEqual(error.call_args[0][0],
                     'source SFLT2: too large: 84 km')

def test(self):
    job_ini = os.path.join(os.path.dirname(case_15.__file__), 'job.ini')
    oq = readinput.get_oqparam(job_ini)
    lt = readinput.get_logic_tree(oq)
    # (2+1) x 4 = 12 realizations
    paths = [rlz.lt_path for rlz in lt]
    self.assertEqual(paths, [
        'A.AA', 'A.AB', 'A.BA', 'A.BB',
        'BAAA', 'BAAB', 'BABA', 'BABB',
        'BBAA', 'BBAB', 'BBBA', 'BBBB'])

def print_full_lt(fname):
    """
    Parse the composite source model and print information about its
    composition and the full logic tree
    """
    oqparam = readinput.get_oqparam(fname)
    full_lt = readinput.get_full_lt(oqparam)
    print(full_lt)
    print('See http://docs.openquake.org/oq-engine/stable/'
          'effective-realizations.html for an explanation')

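# A usage sketch for print_full_lt above; 'job.ini' is a hypothetical path to
# a job file defining a source model logic tree.
print_full_lt('job.ini')
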
def test_missing_cost_types(self):
    job_ini = general.gettemp('''\
[general]
description = Exposure with missing cost_types
calculation_mode = scenario
exposure_file = %s''' % os.path.basename(self.exposure4))
    oqparam = readinput.get_oqparam(job_ini)
    with self.assertRaises(InvalidFile) as ctx:
        readinput.get_sitecol_assetcol(oqparam, cost_types=['structural'])
    self.assertIn("is missing", str(ctx.exception))

def test_site_amplification(self):
    oq = readinput.get_oqparam('job.ini', case_16)
    oq.inputs['amplification'] = os.path.join(
        oq.base_path, 'invalid_amplification.csv')
    with self.assertRaises(InvalidFile) as ctx:
        readinput.get_amplification(oq)
    self.assertIn(
        "levels for b'F' [1.0e-03 1.0e-02 5.0e-02 1.0e-01 2.0e-01 1.6e+00]"
        " instead of [1.0e-03 1.0e-02 5.0e-02 1.0e-01 2.0e-01 5.0e-01"
        " 1.6e+00]", str(ctx.exception))

def run2(job_haz, job_risk, concurrent_tasks, pdb, exports, params):
    """
    Run both hazard and risk, one after the other
    """
    hcalc = base.calculators(readinput.get_oqparam(job_haz))
    hcalc.run(concurrent_tasks=concurrent_tasks, pdb=pdb, exports=exports,
              **params)
    hc_id = hcalc.datastore.calc_id
    oq = readinput.get_oqparam(job_risk, hc_id=hc_id)
    rcalc = base.calculators(oq)
    # disable concurrency in the second calculation to avoid fork issues
    rcalc.run(concurrent_tasks=0, pdb=pdb, exports=exports,
              hazard_calculation_id=hc_id, **params)
    return rcalc

def print_csm_info(fname):
    """
    Parse the composite source model without instantiating the sources and
    print information about its composition and the full logic tree
    """
    oqparam = readinput.get_oqparam(fname)
    csm = readinput.get_composite_source_model(oqparam, in_memory=False)
    print(csm.info)
    print('See https://github.com/gem/oq-risklib/blob/master/doc/'
          'effective-realizations.rst for an explanation')
    print(csm.info.get_rlzs_assoc())

def test_get_oqparam_with_sites_csv(self):
    sites_csv = general.writetmp('1.0,2.1\n3.0,4.1\n5.0,6.1')
    try:
        source = general.writetmp("""
[general]
calculation_mode = classical
[geometry]
sites_csv = %s
[misc]
maximum_distance=1
truncation_level=3
random_seed=5
[site_params]
reference_vs30_type = measured
reference_vs30_value = 600.0
reference_depth_to_2pt5km_per_sec = 5.0
reference_depth_to_1pt0km_per_sec = 100.0
intensity_measure_types_and_levels = {'PGA': [0.1, 0.2]}
investigation_time = 50.
export_dir = %s
""" % (os.path.basename(sites_csv), TMP))
        exp_base_path = os.path.dirname(
            os.path.join(os.path.abspath('.'), source))
        expected_params = {
            'export_dir': TMP,
            'hazard_calculation_id': 1,
            'base_path': exp_base_path,
            'calculation_mode': 'classical',
            'truncation_level': 3.0,
            'random_seed': 5,
            'maximum_distance': {'default': 1.0},
            'inputs': {'job_ini': source,
                       'sites': sites_csv},
            'reference_depth_to_1pt0km_per_sec': 100.0,
            'reference_depth_to_2pt5km_per_sec': 5.0,
            'reference_vs30_type': 'measured',
            'reference_vs30_value': 600.0,
            'hazard_imtls': {'PGA': [0.1, 0.2]},
            'investigation_time': 50.0,
            'risk_investigation_time': 50.0,
        }
        params = getparams(readinput.get_oqparam(source, hc_id=1))
        self.assertEqual(expected_params, params)
    finally:
        os.unlink(sites_csv)

def test_get_oqparam_with_files(self):
    temp_dir = tempfile.mkdtemp()
    source_model_input = general.writetmp(dir=temp_dir)
    site_model_input = general.writetmp(dir=temp_dir, content="foo")
    job_config = general.writetmp(dir=temp_dir, content="""
[general]
calculation_mode = event_based
[site]
sites = 0 0
source_model_file = %s
site_model_file = %s
maximum_distance=1
truncation_level=0
random_seed=0
intensity_measure_types = PGA
investigation_time = 50
export_dir = %s
""" % (os.path.basename(source_model_input),
       os.path.basename(site_model_input), TMP))
    try:
        exp_base_path = os.path.dirname(job_config)
        expected_params = {
            'export_dir': TMP,
            'base_path': exp_base_path,
            'calculation_mode': 'event_based',
            'truncation_level': 0.0,
            'random_seed': 0,
            'maximum_distance': {'default': 1},
            'inputs': {'job_ini': job_config,
                       'site_model': site_model_input,
                       'source': [source_model_input],
                       'source_model': source_model_input},
            'sites': [(0.0, 0.0, 0.0)],
            'hazard_imtls': {'PGA': None},
            'investigation_time': 50.0,
            'risk_investigation_time': 50.0,
        }
        params = getparams(readinput.get_oqparam(job_config))
        for key in expected_params:
            self.assertEqual(expected_params[key], params[key])
        items = sorted(params['inputs'].items())
        keys, values = zip(*items)
        self.assertEqual(('job_ini', 'site_model', 'source',
                          'source_model'), keys)
        self.assertEqual((job_config, site_model_input,
                          [source_model_input], source_model_input), values)
    finally:
        shutil.rmtree(temp_dir)

def zip(job_ini, archive_zip, extra=(), oq=None, log=logging.info):
    """
    Zip the given job.ini file into the given archive, together with all
    related files.
    """
    if not os.path.exists(job_ini):
        sys.exit('%s does not exist' % job_ini)
    if isinstance(archive_zip, str):  # actually it should be path-like
        if not archive_zip.endswith('.zip'):
            sys.exit('%s does not end with .zip' % archive_zip)
        if os.path.exists(archive_zip):
            sys.exit('%s exists already' % archive_zip)
    logging.basicConfig(level=logging.INFO)
    # do not validate to avoid permissions error on the export_dir
    oq = oq or readinput.get_oqparam(job_ini, validate=False)
    files = set(os.path.abspath(fname) for fname in extra)
    # collect .hdf5 tables for the GSIMs, if any
    if 'gsim_logic_tree' in oq.inputs or oq.gsim:
        gsim_lt = readinput.get_gsim_lt(oq)
        for gsims in gsim_lt.values.values():
            for gsim in gsims:
                table = getattr(gsim, 'GMPE_TABLE', None)
                if table:
                    files.add(table)
    # collect exposure.csv, if any
    exposure_xml = oq.inputs.get('exposure')
    if exposure_xml:
        dname = os.path.dirname(exposure_xml)
        expo = nrml.read(exposure_xml, stop='asset')[0]
        if not expo.assets:
            exposure_csv = (~expo.assets).strip()
            for csv in exposure_csv.split():
                if csv and os.path.exists(os.path.join(dname, csv)):
                    files.add(os.path.join(dname, csv))
    # collect the .hdf5 UCERF file, if any
    if oq.calculation_mode.startswith('ucerf_'):
        sm = nrml.read(oq.inputs['source_model'])
        fname = sm.sourceModel.UCERFSource['filename']
        f = os.path.join(os.path.dirname(oq.inputs['source_model']), fname)
        files.add(os.path.normpath(f))
    # collect all other files
    for key in oq.inputs:
        fname = oq.inputs[key]
        if isinstance(fname, list):
            for f in fname:
                files.add(os.path.normpath(f))
        else:
            files.add(os.path.normpath(fname))
    general.zipfiles(files, archive_zip, log=log)

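# A sketch of invoking the zip helper above; 'job.ini' and 'archive.zip' are
# hypothetical paths. Per the checks in the code, the job file must exist and
# the archive must not exist yet, otherwise the function exits.
zip('job.ini', 'archive.zip')
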
def test_oversampling(self):
    from openquake.qa_tests_data.classical import case_17
    oq = readinput.get_oqparam(
        os.path.join(os.path.dirname(case_17.__file__), 'job.ini'))
    csm = readinput.get_composite_source_model(oq)
    # check FullLogicTree serialization
    dic, attrs = csm.info.__toh5__()
    new = object.__new__(FullLogicTree)
    new.__fromh5__(dic, attrs)
    self.assertEqual(repr(new), repr(csm.info).replace(
        '0.6000000000000001', '0.6'))

def get_calc(self, testfile, job_ini):
    """
    Return the outputs of the calculation as a dictionary
    """
    self.testdir = os.path.dirname(testfile)
    ini = os.path.join(self.testdir, job_ini)
    oq = self.oqparam = readinput.get_oqparam(ini)
    oq.concurrent_tasks = 0  # to make the test debuggable
    monitor = PerformanceMonitor(
        self.testdir,
        monitor_csv=os.path.join(oq.export_dir, 'performance_csv'))
    return calculators(self.oqparam, monitor)

def test_get_oqparam_with_sites_csv(self):
    sites_csv = general.gettemp('1.0,2.1\n3.0,4.1\n5.0,6.1')
    try:
        source = general.gettemp("""
[general]
calculation_mode = scenario
[geometry]
sites_csv = %s
[misc]
maximum_distance=1
truncation_level=3
random_seed=5
[site_params]
reference_vs30_type = measured
reference_vs30_value = 600.0
reference_depth_to_2pt5km_per_sec = 5.0
reference_depth_to_1pt0km_per_sec = 100.0
intensity_measure_types_and_levels = {'PGA': [0.1, 0.2]}
export_dir = %s
""" % (os.path.basename(sites_csv), TMP))
        exp_base_path = os.path.dirname(
            os.path.join(os.path.abspath('.'), source))
        expected_params = {
            'all_cost_types': [],
            'export_dir': TMP,
            'base_path': exp_base_path,
            'calculation_mode': 'scenario',
            'complex_fault_mesh_spacing': 5.0,
            'truncation_level': 3.0,
            'random_seed': 5,
            'collapse_level': 0,
            'maximum_distance': {'default': [(1, 1), (10, 1)]},
            'inputs': {'job_ini': source,
                       'sites': sites_csv},
            'reference_depth_to_1pt0km_per_sec': 100.0,
            'reference_depth_to_2pt5km_per_sec': 5.0,
            'reference_vs30_type': 'measured',
            'reference_vs30_value': 600.0,
            'hazard_imtls': {'PGA': [0.1, 0.2]},
            'risk_investigation_time': None,
            'minimum_asset_loss': {},
        }
        params = getparams(readinput.get_oqparam(source, validate=1))
        self.assertEqual(expected_params, params)
    finally:
        os.unlink(sites_csv)

def test_get_oqparam_with_files(self):
    temp_dir = tempfile.mkdtemp()
    site_model_input = general.writetmp(dir=temp_dir, content="foo")
    job_config = general.writetmp(dir=temp_dir, content="""
[general]
calculation_mode = event_based
[foo]
bar = baz
[site]
sites = 0 0
site_model_file = %s
maximum_distance=1
truncation_level=0
random_seed=0
intensity_measure_types = PGA
investigation_time = 50
export_dir = %s
""" % (site_model_input, TMP))
    try:
        exp_base_path = os.path.dirname(job_config)
        expected_params = {
            'export_dir': TMP,
            'base_path': exp_base_path,
            'calculation_mode': 'event_based',
            'truncation_level': 0.0,
            'random_seed': 0,
            'maximum_distance': {'default': 1},
            'inputs': {'job_ini': job_config,
                       'site_model': site_model_input},
            'sites': [(0.0, 0.0)],
            'hazard_imtls': {'PGA': None},
            'investigation_time': 50.0,
            'risk_investigation_time': 50.0,
        }
        with mock.patch('logging.warn') as warn:
            params = getparams(readinput.get_oqparam(job_config))
            for key in expected_params:
                self.assertEqual(expected_params[key], params[key])
            items = sorted(params['inputs'].items())
            keys, values = zip(*items)
            self.assertEqual(('job_ini', 'site_model'), keys)
            self.assertEqual((job_config, site_model_input), values)
            # checking that warnings work
            self.assertEqual(warn.call_args[0][0],
                             "The parameter 'bar' is unknown, ignoring")
    finally:
        shutil.rmtree(temp_dir)

def print_csm_info(fname):
    """
    Parse the composite source model without instantiating the sources and
    print information about its composition and the full logic tree
    """
    oqparam = readinput.get_oqparam(fname)
    csm = readinput.get_composite_source_model(oqparam, in_memory=False)
    print(csm.info)
    print('See http://docs.openquake.org/oq-engine/stable/'
          'effective-realizations.html for an explanation')
    rlzs_assoc = csm.info.get_rlzs_assoc()
    print(rlzs_assoc)
    tot, pairs = get_pickled_sizes(rlzs_assoc)
    print(rst_table(pairs, ['attribute', 'nbytes']))

def print_csm_info(fname):
    """
    Parse the composite source model without instantiating the sources and
    print information about its composition and the full logic tree
    """
    oqparam = readinput.get_oqparam(fname)
    csm = readinput.get_composite_source_model(oqparam, in_memory=False)
    print(csm.info)
    print('See https://github.com/gem/oq-risklib/blob/master/doc/'
          'effective-realizations.rst for an explanation')
    rlzs_assoc = csm.info.get_rlzs_assoc()
    print(rlzs_assoc)
    tot, pairs = get_pickled_sizes(rlzs_assoc)
    print(views.rst_table(pairs, ['attribute', 'nbytes']))

def submit_job(request_files, ini, username, hc_id):
    """
    Create a job object from the given files and run it in a new process.

    :returns: a job ID
    """
    # build a LogContext object associated to a database job
    [job] = engine.create_jobs(
        [dict(calculation_mode='preclassical',
              description='Calculation waiting to start')],
        config.distribution.log_level, None, username, hc_id)
    # store the request files and perform some validation
    try:
        job_ini = store(request_files, ini, job.calc_id)
        job.oqparam = oq = readinput.get_oqparam(
            job_ini, kw={'hazard_calculation_id': hc_id})
        if oq.sensitivity_analysis:
            logs.dbcmd('set_status', job.calc_id, 'deleted')  # hide it
            jobs = engine.create_jobs(
                [job_ini], config.distribution.log_level, None,
                username, hc_id, True)
        else:
            dic = dict(calculation_mode=oq.calculation_mode,
                       description=oq.description,
                       hazard_calculation_id=hc_id)
            logs.dbcmd('update_job', job.calc_id, dic)
            jobs = [job]
    except Exception:
        tb = traceback.format_exc()
        logs.dbcmd('log', job.calc_id, datetime.utcnow(), 'CRITICAL',
                   'before starting', tb)
        logs.dbcmd('finish', job.calc_id, 'failed')
        raise
    custom_tmp = os.path.dirname(job_ini)
    submit_cmd = config.distribution.submit_cmd.split()
    big_job = oq.get_input_size() > int(config.distribution.min_input_size)
    if submit_cmd == ENGINE:  # used for debugging
        for job in jobs:
            subprocess.Popen(submit_cmd + [save(job, custom_tmp)])
    elif submit_cmd == KUBECTL and big_job:
        for job in jobs:
            with open(os.path.join(CWD, 'job.yaml')) as f:
                yaml = string.Template(f.read()).substitute(
                    CALC_PIK=save(job, custom_tmp),
                    CALC_NAME='calc%d' % job.calc_id)
            subprocess.run(submit_cmd, input=yaml.encode('ascii'))
    else:
        Process(target=engine.run_jobs, args=(jobs, )).start()
    return job.calc_id

def check(fname, pprint):
    """
    Check the validity of NRML files and .ini files.
    Optionally, display NRML files in indented format.
    """
    if fname.endswith('.xml'):
        node = nrml.read(fname)
        if pprint:
            print(node.to_str())
    elif fname.endswith('.ini'):
        logging.basicConfig(level=logging.INFO)
        oqparam = readinput.get_oqparam(fname)
        calculators.calculators(oqparam).pre_execute()
        if pprint:
            print(oqparam)

def test_get_oqparam_with_sites_csv(self):
    sites_csv = general.writetmp('1.0,2.1\n3.0,4.1\n5.0,6.1')
    try:
        source = general.writetmp("""
[general]
calculation_mode = classical
[geometry]
sites_csv = %s
[misc]
maximum_distance=1
truncation_level=3
random_seed=5
[site_params]
reference_vs30_type = measured
reference_vs30_value = 600.0
reference_depth_to_2pt5km_per_sec = 5.0
reference_depth_to_1pt0km_per_sec = 100.0
intensity_measure_types_and_levels = {'PGA': [0.1, 0.2]}
investigation_time = 50.
export_dir = %s
""" % (sites_csv, TMP))
        exp_base_path = os.path.dirname(
            os.path.join(os.path.abspath('.'), source))
        expected_params = {
            'export_dir': TMP,
            'base_path': exp_base_path,
            'calculation_mode': 'classical',
            'truncation_level': 3.0,
            'random_seed': 5,
            'maximum_distance': 1.0,
            'inputs': {'job_ini': source, 'sites': sites_csv},
            'reference_depth_to_1pt0km_per_sec': 100.0,
            'reference_depth_to_2pt5km_per_sec': 5.0,
            'reference_vs30_type': 'measured',
            'reference_vs30_value': 600.0,
            'hazard_imtls': {'PGA': [0.1, 0.2]},
            'risk_imtls': {},
            'investigation_time': 50.0,
            'risk_investigation_time': 50.0,
        }
        params = vars(readinput.get_oqparam(source))
        self.assertEqual(expected_params, params)
    finally:
        os.unlink(sites_csv)

def print_csm_info(fname):
    """
    Parse the composite source model without instantiating the sources and
    print information about its composition and the full logic tree
    """
    oqparam = readinput.get_oqparam(fname)
    csm = readinput.get_composite_source_model(oqparam, in_memory=False)
    print(csm.info)
    print('See http://docs.openquake.org/oq-engine/stable/'
          'effective-realizations.html for an explanation')
    rlzs_assoc = csm.info.get_rlzs_assoc()
    print(rlzs_assoc)
    dupl = [(srcs[0]['id'], len(srcs)) for srcs in csm.check_dupl_sources()]
    if dupl:
        print(rst_table(dupl, ['source_id', 'multiplicity']))
    tot, pairs = get_pickled_sizes(rlzs_assoc)
    print(rst_table(pairs, ['attribute', 'nbytes']))

def job_from_file(cfg_file, username, hazard_calculation_id=None):
    """
    Create a full job profile from a job config file.

    :param str cfg_file:
        Path to a job.ini file.
    :param str username:
        The user who will own this job profile and all results.
    :param hazard_calculation_id:
        ID of a previous calculation or None
    :returns:
        a pair (job_id, oqparam)
    """
    oq = readinput.get_oqparam(cfg_file)
    job = create_job(oq.calculation_mode, oq.description,
                     username, hazard_calculation_id)
    return job.id, oq

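# A minimal sketch of calling job_from_file above; the path and the username
# are hypothetical examples.
job_id, oqparam = job_from_file('job.ini', 'openquake')
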
def test_oversampling(self):
    from openquake.qa_tests_data.classical import case_17
    oq = readinput.get_oqparam(
        os.path.join(os.path.dirname(case_17.__file__), 'job.ini'))
    sitecol = readinput.get_site_collection(oq)
    csm = readinput.get_composite_source_model(oq, sitecol)
    assoc = csm.info.get_rlzs_assoc()
    self.assertEqual(
        str(assoc),
        "<RlzsAssoc(size=2, rlzs=5)\n"
        "0,SadighEtAl1997: ['<0,b1,b1,w=0.2>']\n"
        "1,SadighEtAl1997: ['<1,b2,b1,w=0.2>', '<2,b2,b1,w=0.2>', "
        "'<3,b2,b1,w=0.2>', '<4,b2,b1,w=0.2>']>")
    # check CompositionInfo serialization
    array, attrs = assoc.csm_info.__toh5__()
    new = object.__new__(CompositionInfo)
    new.__fromh5__(array, attrs)
    self.assertEqual(repr(new), repr(assoc.csm_info))

def test_wrong_discretization(self):
    source = general.writetmp("""
[general]
calculation_mode = classical
region = 27.685048 85.280857, 27.736719 85.280857, 27.733376 85.355358, 27.675015 85.355358
region_grid_spacing = 5.0
maximum_distance=1
truncation_level=3
random_seed=5
reference_vs30_type = measured
reference_vs30_value = 600.0
reference_depth_to_2pt5km_per_sec = 5.0
reference_depth_to_1pt0km_per_sec = 100.0
intensity_measure_types = PGA
""")
    oqparam = readinput.get_oqparam(source)
    with self.assertRaises(ValueError) as ctx:
        readinput.get_site_collection(oqparam)
    self.assertIn('Could not discretize region', str(ctx.exception))

def test_oversampling(self):
    from openquake.qa_tests_data.classical import case_17
    oq = readinput.get_oqparam(
        os.path.join(os.path.dirname(case_17.__file__), 'job.ini'))
    csm = readinput.get_composite_source_model(oq)
    csm.info.update_eff_ruptures(lambda tm: 1)
    assoc = csm.info.get_rlzs_assoc()
    self.assertEqual(
        str(assoc),
        "<RlzsAssoc(size=2, rlzs=5)\n"
        "0,'[SadighEtAl1997]': [0 1 2]\n"
        "1,'[SadighEtAl1997]': [3 4]>")
    # check CompositionInfo serialization
    dic, attrs = csm.info.__toh5__()
    new = object.__new__(CompositionInfo)
    new.__fromh5__(dic, attrs)
    self.assertEqual(repr(new), repr(csm.info).replace(
        '0.20000000000000004', '0.2'))

def job_from_file(cfg_file, username, hazard_calculation_id=None):
    """
    Create a full job profile from a job config file.

    :param str cfg_file:
        Path to a job.ini file.
    :param str username:
        The user who will own this job profile and all results
    :param hazard_calculation_id:
        ID of a previous calculation or None
    :returns:
        a pair (job_id, oqparam)
    """
    oq = readinput.get_oqparam(cfg_file)
    job_id = logs.dbcmd('create_job', oq.calculation_mode, oq.description,
                        username, datastore.DATADIR, hazard_calculation_id)
    return job_id, oq