def setUpClass(cls):
    """Import the scenario GMFs once for the whole test case and
    attach the resulting output to a freshly fabricated job."""
    admin = connections['admin']
    admin.cursor()  # force the connection to be opened
    cls.conn = admin.connection
    cls.output, cls.hc = import_gmf_scenario(test_data)
    # the importer does not create a job on its own, so build a fake
    # one and link it to the imported output
    cls.output.oq_job = OqJob.objects.create(
        user_name='openquake', hazard_calculation=cls.hc)
    cls.output.save()
def test_import_gmf_scenario(self):
    """Import gmfdata.xml, check the generated calculation parameters
    and database rows, then check the re-exported string form."""
    # gmfdata.xml is a file containing 2 IMTs, 5 ruptures and 3 sites
    fileobj = open(os.path.join(THISDIR, 'gmfdata.xml'))
    out = import_gmf_scenario.import_gmf_scenario(fileobj)
    hc = out.oq_job.get_oqparam()
    imts = sorted(hc.imtls)
    self.assertEqual(imts, ['PGA', 'PGV'])
    n = models.GmfData.objects.filter(gmf__output=out).count()
    assert_equal(hc.calculation_mode, 'scenario')
    assert_equal(hc.number_of_ground_motion_fields, 5)
    assert_equal(n, 30)  # 30 rows entered, 2 x 5 x 3
    assert_equal(hc.description, 'Scenario importer, file gmfdata.xml')
    # now test that exporting the imported data gives back the
    # original data
    # NOTE(review): the expected string below was flattened by a
    # whitespace-mangling tool; its exact line breaks could not be
    # reconstructed and are preserved as found -- TODO restore from VCS
    [gmfset] = list(out.gmf)
    self.assertEqual(str(gmfset), '''\
GMFsPerSES(investigation_time=0.000000, stochastic_event_set_id=1, GMF(imt=PGA sa_period=None sa_damping=None rupture_id=scenario-0000000000 <X= 0.00000, Y= 0.00000, GMV=0.6824957> <X= 0.00000, Y= 0.10000, GMV=0.1270898> <X= 0.00000, Y= 0.20000, GMV=0.1603097>) GMF(imt=PGA sa_period=None sa_damping=None rupture_id=scenario-0000000001 <X= 0.00000, Y= 0.00000, GMV=0.3656627> <X= 0.00000, Y= 0.10000, GMV=0.2561813> <X= 0.00000, Y= 0.20000, GMV=0.1106853>) GMF(imt=PGA sa_period=None sa_damping=None rupture_id=scenario-0000000002 <X= 0.00000, Y= 0.00000, GMV=0.8700834> <X= 0.00000, Y= 0.10000, GMV=0.2106384> <X= 0.00000, Y= 0.20000, GMV=0.2232175>) GMF(imt=PGA sa_period=None sa_damping=None rupture_id=scenario-0000000003 <X= 0.00000, Y= 0.00000, GMV=0.3279292> <X= 0.00000, Y= 0.10000, GMV=0.2357552> <X= 0.00000, Y= 0.20000, GMV=0.1781143>) GMF(imt=PGA sa_period=None sa_damping=None rupture_id=scenario-0000000004 <X= 0.00000, Y= 0.00000, GMV=0.6968686> <X= 0.00000, Y= 0.10000, GMV=0.2581405> <X= 0.00000, Y= 0.20000, GMV=0.1351649>) GMF(imt=PGV sa_period=None sa_damping=None rupture_id=scenario-0000000000 <X= 0.00000, Y= 0.00000, GMV=0.6824957> <X= 0.00000, Y= 0.10000, GMV=0.1270898> <X= 0.00000, Y= 0.20000, GMV=0.1603097>) GMF(imt=PGV sa_period=None sa_damping=None 
rupture_id=scenario-0000000001 <X= 0.00000, Y= 0.00000, GMV=0.3656627> <X= 0.00000, Y= 0.10000, GMV=0.2561813> <X= 0.00000, Y= 0.20000, GMV=0.1106853>) GMF(imt=PGV sa_period=None sa_damping=None rupture_id=scenario-0000000002 <X= 0.00000, Y= 0.00000, GMV=0.8700834> <X= 0.00000, Y= 0.10000, GMV=0.2106384> <X= 0.00000, Y= 0.20000, GMV=0.2232175>) GMF(imt=PGV sa_period=None sa_damping=None rupture_id=scenario-0000000003 <X= 0.00000, Y= 0.00000, GMV=0.3279292> <X= 0.00000, Y= 0.10000, GMV=0.2357552> <X= 0.00000, Y= 0.20000, GMV=0.1781143>) GMF(imt=PGV sa_period=None sa_damping=None rupture_id=scenario-0000000004 <X= 0.00000, Y= 0.00000, GMV=0.6968686> <X= 0.00000, Y= 0.10000, GMV=0.2581405> <X= 0.00000, Y= 0.20000, GMV=0.1351649>))''')
def test_import_gmf_scenario(self):
    """Import gmfdata.xml, check the generated calculation parameters
    and database rows, then check the re-exported string form."""
    # gmfdata.xml is a file containing 2 IMTs, 5 ruptures and 3 sites
    fileobj = open(os.path.join(THISDIR, 'gmfdata.xml'))
    out = import_gmf_scenario.import_gmf_scenario(fileobj)
    hc = out.oq_job.get_oqparam()
    imts = sorted(hc.intensity_measure_types_and_levels)
    self.assertEqual(imts, ['PGA', 'PGV'])
    n = models.GmfData.objects.filter(gmf__output=out).count()
    assert_equal(hc.calculation_mode, 'scenario')
    assert_equal(hc.number_of_ground_motion_fields, 5)
    assert_equal(n, 30)  # 30 rows entered, 2 x 5 x 3
    assert_equal(hc.description, 'Scenario importer, file gmfdata.xml')
    # now test that exporting the imported data gives back the
    # original data
    # NOTE(review): the expected string below was flattened by a
    # whitespace-mangling tool; its exact line breaks could not be
    # reconstructed and are preserved as found -- TODO restore from VCS
    [gmfset] = list(out.gmf)
    self.assertEqual(str(gmfset), '''\
GMFsPerSES(investigation_time=0.000000, stochastic_event_set_id=1, GMF(imt=PGA sa_period=None sa_damping=None rupture_id=scenario-0000000000 <X= 0.00000, Y= 0.00000, GMV=0.6824957> <X= 0.00000, Y= 0.10000, GMV=0.1270898> <X= 0.00000, Y= 0.20000, GMV=0.1603097>) GMF(imt=PGA sa_period=None sa_damping=None rupture_id=scenario-0000000001 <X= 0.00000, Y= 0.00000, GMV=0.3656627> <X= 0.00000, Y= 0.10000, GMV=0.2561813> <X= 0.00000, Y= 0.20000, GMV=0.1106853>) GMF(imt=PGA sa_period=None sa_damping=None rupture_id=scenario-0000000002 <X= 0.00000, Y= 0.00000, GMV=0.8700834> <X= 0.00000, Y= 0.10000, GMV=0.2106384> <X= 0.00000, Y= 0.20000, GMV=0.2232175>) GMF(imt=PGA sa_period=None sa_damping=None rupture_id=scenario-0000000003 <X= 0.00000, Y= 0.00000, GMV=0.3279292> <X= 0.00000, Y= 0.10000, GMV=0.2357552> <X= 0.00000, Y= 0.20000, GMV=0.1781143>) GMF(imt=PGA sa_period=None sa_damping=None rupture_id=scenario-0000000004 <X= 0.00000, Y= 0.00000, GMV=0.6968686> <X= 0.00000, Y= 0.10000, GMV=0.2581405> <X= 0.00000, Y= 0.20000, GMV=0.1351649>) GMF(imt=PGV sa_period=None sa_damping=None rupture_id=scenario-0000000000 <X= 0.00000, Y= 0.00000, GMV=0.6824957> <X= 0.00000, Y= 0.10000, GMV=0.1270898> <X= 0.00000, Y= 0.20000, GMV=0.1603097>) GMF(imt=PGV 
sa_period=None sa_damping=None rupture_id=scenario-0000000001 <X= 0.00000, Y= 0.00000, GMV=0.3656627> <X= 0.00000, Y= 0.10000, GMV=0.2561813> <X= 0.00000, Y= 0.20000, GMV=0.1106853>) GMF(imt=PGV sa_period=None sa_damping=None rupture_id=scenario-0000000002 <X= 0.00000, Y= 0.00000, GMV=0.8700834> <X= 0.00000, Y= 0.10000, GMV=0.2106384> <X= 0.00000, Y= 0.20000, GMV=0.2232175>) GMF(imt=PGV sa_period=None sa_damping=None rupture_id=scenario-0000000003 <X= 0.00000, Y= 0.00000, GMV=0.3279292> <X= 0.00000, Y= 0.10000, GMV=0.2357552> <X= 0.00000, Y= 0.20000, GMV=0.1781143>) GMF(imt=PGV sa_period=None sa_damping=None rupture_id=scenario-0000000004 <X= 0.00000, Y= 0.00000, GMV=0.6968686> <X= 0.00000, Y= 0.10000, GMV=0.2581405> <X= 0.00000, Y= 0.20000, GMV=0.1351649>))''')
def get_hazard_job(self):
    """Import the scenario GMF file and return the associated job."""
    path = self._test_path("gmf-scenario.xml")
    with open(path) as fileobj:
        imported = import_gmf_scenario(fileobj)
    hazard_job = imported.oq_job
    # this is needed to make happy the GetterBuilder
    calc = hazard_job.hazard_calculation
    calc.number_of_ground_motion_fields = 3
    calc.save()
    return hazard_job
def test_import_gmf_scenario(self):
    """Import examples/gmf-scenario.xml and check the number of
    imported rows and the description of the calculation."""
    # the examples/ directory lives at the root of the nrmllib checkout
    pkgdir = nrmllib.__path__[0]
    repodir = os.path.dirname(os.path.dirname(pkgdir))
    xmlpath = os.path.join(repodir, 'examples', 'gmf-scenario.xml')
    fileobj = open(xmlpath)
    out, hc = import_gmf_scenario.import_gmf_scenario(fileobj)
    num_rows = models.GmfData.objects.filter(gmf__output=out).count()
    assert_equal(num_rows, 9)  # 9 rows entered
    assert_equal(hc.description,
                 'Scenario importer, file gmf-scenario.xml')
def test_import_gmf_scenario(self):
    """Import examples/gmf-scenario.xml and check the generated
    calculation parameters against the number of imported rows."""
    # the examples/ directory lives at the root of the nrmllib checkout
    repo_root = os.path.dirname(os.path.dirname(nrmllib.__path__[0]))
    xml_path = os.path.join(repo_root, 'examples', 'gmf-scenario.xml')
    fileobj = open(xml_path)
    out = import_gmf_scenario.import_gmf_scenario(fileobj)
    params = out.oq_job.get_oqparam()
    num_rows = models.GmfData.objects.filter(gmf__output=out).count()
    assert_equal(params.calculation_mode, 'scenario')
    assert_equal(params.number_of_ground_motion_fields, num_rows)
    assert_equal(num_rows, 9)  # 9 rows entered
    assert_equal(params.description,
                 'Scenario importer, file gmf-scenario.xml')
def test_import_gmf_scenario_csv(self):
    """Import eight GMF records from an in-memory table and check the
    number of rows inserted in the database."""
    # each record is: IMT, sa_period, sa_damping, {gmvs}, WKT point;
    # \N presumably marks a NULL (PostgreSQL COPY convention) and the
    # original field separator may have been a tab -- TODO confirm,
    # the whitespace was mangled when this file was flattened
    test_data = StringIO(unicode('''\
SA 0.025 5.0 {0.2} POINT(0.0 0.0)
SA 0.025 5.0 {1.4} POINT(1.0 0.0)
SA 0.025 5.0 {0.6} POINT(0.0 1.0)
PGA \N \N {0.2,0.3} POINT(0.0 0.0)
PGA \N \N {1.4,1.5} POINT(1.0 0.0)
PGA \N \N {0.6,0.7} POINT(0.0 1.0)
PGV \N \N {0.2} POINT(0.0 0.0)
PGV \N \N {1.4} POINT(1.0 0.0)
'''))
    # the importer reads the .name attribute to build the description
    test_data.name = 'test_data'
    out = import_gmf_scenario.import_gmf_scenario(test_data)
    n = models.GmfData.objects.filter(gmf__output=out).count()
    assert_equal(n, 8)  # 8 rows entered
def test_import_gmf_scenario_csv(self):
    """Import eight GMF records from an in-memory table and check the
    number of rows inserted in the database."""
    # each record is: IMT, sa_period, sa_damping, {gmvs}, WKT point;
    # \N presumably marks a NULL (PostgreSQL COPY convention) and the
    # original field separator may have been a tab -- TODO confirm,
    # the whitespace was mangled when this file was flattened
    test_data = StringIO(
        unicode('''\
SA 0.025 5.0 {0.2} POINT(0.0 0.0)
SA 0.025 5.0 {1.4} POINT(1.0 0.0)
SA 0.025 5.0 {0.6} POINT(0.0 1.0)
PGA \N \N {0.2,0.3} POINT(0.0 0.0)
PGA \N \N {1.4,1.5} POINT(1.0 0.0)
PGA \N \N {0.6,0.7} POINT(0.0 1.0)
PGV \N \N {0.2} POINT(0.0 0.0)
PGV \N \N {1.4} POINT(1.0 0.0)
'''))
    # the importer reads the .name attribute to build the description
    test_data.name = 'test_data'
    out, _hc = import_gmf_scenario.import_gmf_scenario(test_data)
    n = models.GmfData.objects.filter(gmf__output=out).count()
    assert_equal(n, 8)  # 8 rows entered
def main():
    """Parse the command line and dispatch to the requested engine
    action (run/list/export/delete hazard or risk calculations, or
    import GMFs and hazard curves)."""
    arg_parser = set_up_arg_parser()
    args = arg_parser.parse_args()
    if args.version:
        engine.complain_and_exit(__version__)
    if args.config_file:
        os.environ[config.OQ_CONFIG_FILE_VAR] = \
            abspath(expanduser(args.config_file))
        config.refresh()
    if args.optimize_source_model:
        optimize_source_model(*args.optimize_source_model)
    if args.no_distribute:
        # disable the task distribution for this run
        os.environ[openquake.engine.NO_DISTRIBUTE_VAR] = '1'
    if args.list_inputs:
        list_inputs(args.list_inputs)
    # hazard
    elif args.list_hazard_calculations:
        list_calculations(models.HazardCalculation.objects)
    elif args.list_hazard_outputs is not None:
        engine.list_hazard_outputs(args.list_hazard_outputs)
    elif args.export_hazard is not None:
        output_id, target_dir = args.export_hazard
        output_id = int(output_id)
        export_hazard(output_id, expanduser(target_dir), args.export_type)
    elif args.run_hazard is not None:
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        engine.run_hazard(expanduser(args.run_hazard), args.log_level,
                          log_file, args.exports)
    elif args.delete_hazard_calculation is not None:
        del_haz_calc(args.delete_hazard_calculation, args.yes)
    # risk
    elif args.list_risk_calculations:
        list_calculations(models.RiskCalculation.objects)
    elif args.list_risk_outputs is not None:
        engine.list_risk_outputs(args.list_risk_outputs)
    elif args.export_risk is not None:
        output_id, target_dir = args.export_risk
        export_risk(output_id, expanduser(target_dir), args.export_type)
    elif args.run_risk is not None:
        # a risk calculation needs a hazard output or calculation to start
        if (args.hazard_output_id is None
                and args.hazard_calculation_id is None):
            engine.complain_and_exit(MISSING_HAZARD_MSG)
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        engine.run_risk(expanduser(args.run_risk), args.log_level,
                        log_file, args.exports,
                        hazard_output_id=args.hazard_output_id,
                        hazard_calculation_id=args.hazard_calculation_id)
    elif args.delete_risk_calculation is not None:
        del_risk_calc(args.delete_risk_calculation, args.yes)
    # import
    elif args.load_gmf is not None:
        with open(args.load_gmf) as f:
            out, hc = import_gmf_scenario(f)
            print 'Added output id=%d of type %s; hazard_calculation_id=%d'\
                % (out.id, out.output_type, hc.id)
    elif args.load_curve is not None:
        with open(args.load_curve) as f:
            out, hc = import_hazard_curves(f)
            print 'Added output id=%d of type %s; hazard_calculation_id=%d'\
                % (out.id, out.output_type, hc.id)
    elif args.list_imported_outputs:
        list_imported_outputs()
    elif args.delete_uncompleted_calculations:
        delete_uncompleted_calculations()
    else:
        # no recognized action: remind the user of the options
        arg_parser.print_usage()
def get_hazard_job(self):
    """Return the job created by importing the scenario GMF file."""
    path = self._test_path('gmf-scenario.xml')
    with open(path) as fileobj:
        imported = import_gmf_scenario(fileobj)
    return imported.oq_job
def main():
    """Parse the command line and dispatch to the requested engine
    action (database maintenance, run/list/export/delete hazard or
    risk calculations, import/save/load of hazard data)."""
    arg_parser = set_up_arg_parser()
    args = arg_parser.parse_args()
    if args.version:
        print __version__
        sys.exit(0)
    if args.config_file:
        os.environ[config.OQ_CONFIG_FILE_VAR] = \
            abspath(expanduser(args.config_file))
        config.refresh()
    if args.no_distribute:
        # disable the task distribution for this run
        os.environ[openquake.engine.NO_DISTRIBUTE_VAR] = '1'
    if args.upgrade_db:
        logging.basicConfig(level=logging.INFO)
        logs.set_level('info')
        conn = models.getcursor('admin').connection
        # show what the upgrade would do before touching the database
        msg = upgrade_manager.what_if_I_upgrade(
            conn, extract_scripts='read_scripts')
        print msg
        if msg.startswith('Your database is already updated'):
            pass
        elif args.yes or confirm('Proceed? (y/n) '):
            upgrade_manager.upgrade_db(conn)
        sys.exit(0)
    if args.version_db:
        conn = models.getcursor('admin').connection
        print upgrade_manager.version_db(conn)
        sys.exit(0)
    if args.what_if_I_upgrade:
        conn = models.getcursor('admin').connection
        print upgrade_manager.what_if_I_upgrade(conn)
        sys.exit(0)
    if args.list_inputs:
        list_inputs(args.list_inputs)
    # hazard
    elif args.list_hazard_calculations:
        list_calculations(models.OqJob.objects)
    elif args.list_hazard_outputs is not None:
        engine.list_hazard_outputs(args.list_hazard_outputs)
    elif args.export_hazard is not None:
        output_id, target_dir = args.export_hazard
        output_id = int(output_id)
        export_hazard(output_id, expanduser(target_dir), args.export_type)
    elif args.export_hazard_outputs is not None:
        hc_id, target_dir = args.export_hazard_outputs
        export_hazard_outputs(int(hc_id), expanduser(target_dir),
                              args.export_type)
    elif args.run_hazard is not None:
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        engine.run_job(expanduser(args.run_hazard), args.log_level,
                       log_file, args.exports)
    elif args.delete_hazard_calculation is not None:
        del_haz_calc(args.delete_hazard_calculation, args.yes)
    # risk
    elif args.list_risk_calculations:
        list_calculations(models.RiskCalculation.objects)
    elif args.list_risk_outputs is not None:
        engine.list_risk_outputs(args.list_risk_outputs)
    elif args.export_risk is not None:
        output_id, target_dir = args.export_risk
        export_risk(output_id, expanduser(target_dir), args.export_type)
    elif args.export_risk_outputs is not None:
        rc_id, target_dir = args.export_risk_outputs
        export_risk_outputs(int(rc_id), expanduser(target_dir),
                            args.export_type)
    elif args.run_risk is not None:
        # a risk calculation needs a hazard output or calculation to start
        if (args.hazard_output_id is None
                and args.hazard_calculation_id is None):
            sys.exit(MISSING_HAZARD_MSG)
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        engine.run_job(expanduser(args.run_risk), args.log_level,
                       log_file, args.exports,
                       hazard_output_id=args.hazard_output_id,
                       hazard_calculation_id=args.hazard_calculation_id)
    elif args.delete_risk_calculation is not None:
        del_risk_calc(args.delete_risk_calculation, args.yes)
    # import
    elif args.load_gmf is not None:
        with open(args.load_gmf) as f:
            out = import_gmf_scenario(f)
            print 'Added output id=%d of type %s; hazard_calculation_id=%d'\
                % (out.id, out.output_type, out.oq_job.id)
    elif args.load_curve is not None:
        with open(args.load_curve) as f:
            out = import_hazard_curves(f)
            print 'Added output id=%d of type %s; hazard_calculation_id=%d'\
                % (out.id, out.output_type, out.oq_job.id)
    elif args.list_imported_outputs:
        list_imported_outputs()
    elif args.delete_uncompleted_calculations:
        delete_uncompleted_calculations()
    elif args.save_hazard_calculation:
        save_hazards.main(*args.save_hazard_calculation)
    elif args.load_hazard_calculation:
        hc_ids = load_hazards.hazard_load(
            models.getcursor('admin').connection,
            args.load_hazard_calculation)
        print "Load hazard calculation with IDs: %s" % hc_ids
    else:
        # no recognized action: remind the user of the options
        arg_parser.print_usage()
def test_duplicated_rupture_tag(self):
    """An input file with a duplicated rupture tag must make the
    importer raise DuplicatedTag carrying the offending tag as the
    exception message."""
    # open the file with a context manager so the handle is closed
    # even when the importer raises (the original leaked it)
    with open(os.path.join(THISDIR, 'gmfdata-wrong.xml')) as fileobj:
        with self.assertRaises(import_gmf_scenario.DuplicatedTag) as ctx:
            import_gmf_scenario.import_gmf_scenario(fileobj)
    self.assertEqual(str(ctx.exception), 'scenario-0000000002')
def setUpClass(cls):
    """Import the scenario GMFs once for the whole test case and
    attach the resulting output to a freshly fabricated job."""
    admin = connections["admin"]
    admin.cursor()  # force the connection to be opened
    cls.conn = admin.connection
    cls.output, cls.hc = import_gmf_scenario(test_data, "openquake")
    # the importer does not create a job on its own, so build a fake
    # one and link it to the imported output
    cls.output.oq_job = OqJob.objects.create(
        owner_id=1, hazard_calculation=cls.hc)
    cls.output.save()
def main():
    """Parse the command line and dispatch to the requested engine
    action (run hazard and/or risk, database maintenance, listing,
    exporting, importing)."""
    arg_parser = set_up_arg_parser()
    args = arg_parser.parse_args()
    exports = args.exports or 'xml,csv'
    if args.version:
        print __version__
        sys.exit(0)
    if args.run or args.run_hazard or args.run_risk:
        # the logging will be configured in engine.py
        pass
    else:
        # configure a basic logging
        logging.basicConfig(level=logging.INFO)
    if args.config_file:
        os.environ[config.OQ_CONFIG_FILE_VAR] = \
            abspath(expanduser(args.config_file))
        config.refresh()
    if args.no_distribute:
        # disable the task distribution for this run
        os.environ[openquake.engine.NO_DISTRIBUTE_VAR] = '1'
    if args.make_html_report:
        conn = models.getcursor('admin').connection
        print 'Written', make_report(conn, args.make_html_report)
        sys.exit(0)
    if args.upgrade_db:
        logs.set_level('info')
        conn = models.getcursor('admin').connection
        # show what the upgrade would do before touching the database
        msg = upgrade_manager.what_if_I_upgrade(conn,
                                                extract_scripts='read_scripts')
        print msg
        if msg.startswith('Your database is already updated'):
            pass
        elif args.yes or confirm('Proceed? (y/n) '):
            upgrade_manager.upgrade_db(conn)
        sys.exit(0)
    if args.version_db:
        conn = models.getcursor('admin').connection
        print upgrade_manager.version_db(conn)
        sys.exit(0)
    if args.what_if_I_upgrade:
        conn = models.getcursor('admin').connection
        print upgrade_manager.what_if_I_upgrade(conn)
        sys.exit(0)
    if args.list_inputs:
        list_inputs(args.list_inputs)
    # hazard or hazard+risk
    elif args.run:
        job_inis = map(expanduser, args.run.split(','))
        if len(job_inis) not in (1, 2):
            sys.exit('%s should be a .ini filename or a pair of filenames '
                     'separated by a comma' % args.run)
        for job_ini in job_inis:
            open(job_ini).read()  # raise an IOError if the file does not exist
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        if args.lite:
            # run hazard and risk together
            engine.run_job_lite(job_inis, args.log_level, log_file,
                                args.exports)
        else:
            # run hazard
            job = engine.run_job(job_inis[0], args.log_level, log_file,
                                 args.exports)
            # run risk
            if len(job_inis) == 2:
                engine.run_job(job_inis[1], args.log_level, log_file,
                               args.exports, hazard_calculation_id=job.id)
    # hazard
    elif args.list_hazard_calculations:
        list_calculations('hazard')
    elif args.run_hazard is not None:
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        engine.run_job(expanduser(args.run_hazard), args.log_level,
                       log_file, args.exports)
    elif args.delete_hazard_calculation is not None:
        del_calc(args.delete_hazard_calculation, args.yes)
    # risk
    elif args.list_risk_calculations:
        list_calculations('risk')
    elif args.run_risk is not None:
        # a risk calculation needs a hazard output or calculation to start
        if (args.hazard_output_id is None
                and args.hazard_calculation_id is None):
            sys.exit(MISSING_HAZARD_MSG)
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        engine.run_job(expanduser(args.run_risk), args.log_level,
                       log_file, args.exports,
                       hazard_output_id=args.hazard_output_id,
                       hazard_calculation_id=args.hazard_calculation_id)
    elif args.delete_risk_calculation is not None:
        del_calc(args.delete_risk_calculation, args.yes)
    # export
    elif args.list_outputs is not None:
        engine.list_outputs(args.list_outputs)
    elif args.list_hazard_outputs is not None:
        deprecate('--list-hazard-outputs', '--list-outputs')
        engine.list_outputs(args.list_hazard_outputs)
    elif args.list_risk_outputs is not None:
        deprecate('--list-risk-outputs', '--list-outputs')
        engine.list_outputs(args.list_risk_outputs)
    elif args.export_output is not None:
        output_id, target_dir = args.export_output
        export(int(output_id), expanduser(target_dir), exports)
    elif args.export_hazard_output is not None:
        deprecate('--export-hazard-output', '--export-output')
        output_id, target_dir = args.export_hazard_output
        export(int(output_id), expanduser(target_dir), exports)
    elif args.export_risk_output is not None:
        deprecate('--export-hazard-output', '--export-output')
        output_id, target_dir = args.export_risk_output
        export(int(output_id), expanduser(target_dir), exports)
    elif args.export_outputs is not None:
        job_id, target_dir = args.export_outputs
        export_outputs(int(job_id), expanduser(target_dir), exports)
    elif args.export_stats is not None:
        job_id, target_dir, output_type = args.export_stats
        export_stats(int(job_id), expanduser(target_dir), output_type,
                     exports)
    # deprecated
    elif args.export_hazard_outputs is not None:
        deprecate('--export-hazard-outputs', '--export-outputs')
        job_id, target_dir = args.export_hazard_outputs
        export_outputs(int(job_id), expanduser(target_dir), exports)
    elif args.export_risk_outputs is not None:
        deprecate('--export-risk-outputs', '--export-outputs')
        job_id, target_dir = args.export_risk_outputs
        export_outputs(int(job_id), expanduser(target_dir), exports)
    # import
    elif args.load_gmf is not None:
        with open(args.load_gmf) as f:
            out = import_gmf_scenario(f)
            print 'Added output id=%d of type %s; hazard_calculation_id=%d'\
                % (out.id, out.output_type, out.oq_job.id)
    elif args.load_curve is not None:
        with open(args.load_curve) as f:
            out = import_hazard_curves(f)
            print 'Added output id=%d of type %s; hazard_calculation_id=%d'\
                % (out.id, out.output_type, out.oq_job.id)
    elif args.list_imported_outputs:
        list_imported_outputs()
    elif args.delete_uncompleted_calculations:
        delete_uncompleted_calculations()
    elif args.save_hazard_calculation:
        save_hazards.main(*args.save_hazard_calculation)
    elif args.load_hazard_calculation:
        job_ids = load_hazards.hazard_load(
            models.getcursor('admin').connection,
            args.load_hazard_calculation)
        print "Load hazard calculation with IDs: %s" % job_ids
    else:
        # no recognized action: remind the user of the options
        arg_parser.print_usage()
def main():
    """Parse the command line and dispatch to the requested engine
    action (run hazard and/or risk, database maintenance, listing,
    exporting, importing)."""
    arg_parser = set_up_arg_parser()
    args = arg_parser.parse_args()
    exports = args.exports or 'xml,csv'
    if args.version:
        print __version__
        sys.exit(0)
    if args.run or args.run_hazard or args.run_risk:
        # the logging will be configured in engine.py
        pass
    else:
        # configure a basic logging
        logging.basicConfig(level=logging.INFO)
    if args.config_file:
        os.environ[config.OQ_CONFIG_FILE_VAR] = \
            abspath(expanduser(args.config_file))
        config.refresh()
    if args.no_distribute:
        # disable the task distribution for this run
        os.environ[openquake.engine.NO_DISTRIBUTE_VAR] = '1'
    if args.make_html_report:
        conn = models.getcursor('admin').connection
        print 'Written', make_report(conn, args.make_html_report)
        sys.exit(0)
    if args.upgrade_db:
        logs.set_level('info')
        conn = models.getcursor('admin').connection
        # show what the upgrade would do before touching the database
        msg = upgrade_manager.what_if_I_upgrade(
            conn, extract_scripts='read_scripts')
        print msg
        if msg.startswith('Your database is already updated'):
            pass
        elif args.yes or confirm('Proceed? (y/n) '):
            upgrade_manager.upgrade_db(conn)
        sys.exit(0)
    if args.version_db:
        conn = models.getcursor('admin').connection
        print upgrade_manager.version_db(conn)
        sys.exit(0)
    if args.what_if_I_upgrade:
        conn = models.getcursor('admin').connection
        print upgrade_manager.what_if_I_upgrade(conn)
        sys.exit(0)
    if args.list_inputs:
        list_inputs(args.list_inputs)
    # hazard or hazard+risk
    elif args.run:
        job_inis = map(expanduser, args.run.split(','))
        if len(job_inis) not in (1, 2):
            sys.exit('%s should be a .ini filename or a pair of filenames '
                     'separated by a comma' % args.run)
        for job_ini in job_inis:
            open(job_ini).read()  # raise an IOError if the file does not exist
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        if args.lite:
            # run hazard and risk together
            engine.run_job_lite(job_inis, args.log_level, log_file,
                                args.exports)
        else:
            # run hazard
            job = engine.run_job(job_inis[0], args.log_level, log_file,
                                 args.exports)
            # run risk
            if len(job_inis) == 2:
                engine.run_job(job_inis[1], args.log_level, log_file,
                               args.exports, hazard_calculation_id=job.id)
    # hazard
    elif args.list_hazard_calculations:
        list_calculations('hazard')
    elif args.run_hazard is not None:
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        engine.run_job(expanduser(args.run_hazard), args.log_level,
                       log_file, args.exports)
    elif args.delete_hazard_calculation is not None:
        del_calc(args.delete_hazard_calculation, args.yes)
    # risk
    elif args.list_risk_calculations:
        list_calculations('risk')
    elif args.run_risk is not None:
        # a risk calculation needs a hazard output or calculation to start
        if (args.hazard_output_id is None
                and args.hazard_calculation_id is None):
            sys.exit(MISSING_HAZARD_MSG)
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        engine.run_job(
            expanduser(args.run_risk), args.log_level, log_file,
            args.exports, hazard_output_id=args.hazard_output_id,
            hazard_calculation_id=args.hazard_calculation_id)
    elif args.delete_risk_calculation is not None:
        del_calc(args.delete_risk_calculation, args.yes)
    # export
    elif args.list_outputs is not None:
        engine.list_outputs(args.list_outputs)
    elif args.list_hazard_outputs is not None:
        deprecate('--list-hazard-outputs', '--list-outputs')
        engine.list_outputs(args.list_hazard_outputs)
    elif args.list_risk_outputs is not None:
        deprecate('--list-risk-outputs', '--list-outputs')
        engine.list_outputs(args.list_risk_outputs)
    elif args.export_output is not None:
        output_id, target_dir = args.export_output
        export(int(output_id), expanduser(target_dir), exports)
    elif args.export_hazard_output is not None:
        deprecate('--export-hazard-output', '--export-output')
        output_id, target_dir = args.export_hazard_output
        export(int(output_id), expanduser(target_dir), exports)
    elif args.export_risk_output is not None:
        deprecate('--export-hazard-output', '--export-output')
        output_id, target_dir = args.export_risk_output
        export(int(output_id), expanduser(target_dir), exports)
    elif args.export_outputs is not None:
        job_id, target_dir = args.export_outputs
        export_outputs(int(job_id), expanduser(target_dir), exports)
    elif args.export_stats is not None:
        job_id, target_dir, output_type = args.export_stats
        export_stats(int(job_id), expanduser(target_dir), output_type,
                     exports)
    # deprecated
    elif args.export_hazard_outputs is not None:
        deprecate('--export-hazard-outputs', '--export-outputs')
        job_id, target_dir = args.export_hazard_outputs
        export_outputs(int(job_id), expanduser(target_dir), exports)
    elif args.export_risk_outputs is not None:
        deprecate('--export-risk-outputs', '--export-outputs')
        job_id, target_dir = args.export_risk_outputs
        export_outputs(int(job_id), expanduser(target_dir), exports)
    # import
    elif args.load_gmf is not None:
        with open(args.load_gmf) as f:
            out = import_gmf_scenario(f)
            print 'Added output id=%d of type %s; hazard_calculation_id=%d'\
                % (out.id, out.output_type, out.oq_job.id)
    elif args.load_curve is not None:
        with open(args.load_curve) as f:
            out = import_hazard_curves(f)
            print 'Added output id=%d of type %s; hazard_calculation_id=%d'\
                % (out.id, out.output_type, out.oq_job.id)
    elif args.list_imported_outputs:
        list_imported_outputs()
    elif args.delete_uncompleted_calculations:
        delete_uncompleted_calculations()
    elif args.save_hazard_calculation:
        save_hazards.main(*args.save_hazard_calculation)
    elif args.load_hazard_calculation:
        job_ids = load_hazards.hazard_load(
            models.getcursor('admin').connection,
            args.load_hazard_calculation)
        print "Load hazard calculation with IDs: %s" % job_ids
    else:
        # no recognized action: remind the user of the options
        arg_parser.print_usage()
def main():
    """Parse the command line and dispatch to the requested engine
    action (run/list/export/delete hazard or risk calculations, or
    import/save/load hazard data)."""
    arg_parser = set_up_arg_parser()
    args = arg_parser.parse_args()
    if args.version:
        print __version__
        sys.exit(0)
    if args.config_file:
        os.environ[config.OQ_CONFIG_FILE_VAR] = \
            abspath(expanduser(args.config_file))
        config.refresh()
    if args.no_distribute:
        # disable the task distribution for this run
        os.environ[openquake.engine.NO_DISTRIBUTE_VAR] = '1'
    if args.list_inputs:
        list_inputs(args.list_inputs)
    # hazard
    elif args.list_hazard_calculations:
        list_calculations(models.HazardCalculation.objects)
    elif args.list_hazard_outputs is not None:
        engine.list_hazard_outputs(args.list_hazard_outputs)
    elif args.export_hazard is not None:
        output_id, target_dir = args.export_hazard
        output_id = int(output_id)
        export_hazard(output_id, expanduser(target_dir), args.export_type)
    elif args.export_hazard_outputs is not None:
        hc_id, target_dir = args.export_hazard_outputs
        export_hazard_outputs(int(hc_id), expanduser(target_dir),
                              args.export_type)
    elif args.run_hazard is not None:
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        engine.run_job(expanduser(args.run_hazard), args.log_level,
                       log_file, args.exports)
    elif args.delete_hazard_calculation is not None:
        del_haz_calc(args.delete_hazard_calculation, args.yes)
    # risk
    elif args.list_risk_calculations:
        list_calculations(models.RiskCalculation.objects)
    elif args.list_risk_outputs is not None:
        engine.list_risk_outputs(args.list_risk_outputs)
    elif args.export_risk is not None:
        output_id, target_dir = args.export_risk
        export_risk(output_id, expanduser(target_dir), args.export_type)
    elif args.export_risk_outputs is not None:
        rc_id, target_dir = args.export_risk_outputs
        export_risk_outputs(int(rc_id), expanduser(target_dir),
                            args.export_type)
    elif args.run_risk is not None:
        # a risk calculation needs a hazard output or calculation to start
        if (args.hazard_output_id is None
                and args.hazard_calculation_id is None):
            sys.exit(MISSING_HAZARD_MSG)
        log_file = expanduser(args.log_file) \
            if args.log_file is not None else None
        engine.run_job(expanduser(args.run_risk), args.log_level,
                       log_file, args.exports,
                       hazard_output_id=args.hazard_output_id,
                       hazard_calculation_id=args.hazard_calculation_id)
    elif args.delete_risk_calculation is not None:
        del_risk_calc(args.delete_risk_calculation, args.yes)
    # import
    elif args.load_gmf is not None:
        with open(args.load_gmf) as f:
            out = import_gmf_scenario(f)
            print 'Added output id=%d of type %s; hazard_calculation_id=%d'\
                % (out.id, out.output_type, out.oq_job.hazard_calculation.id)
    elif args.load_curve is not None:
        with open(args.load_curve) as f:
            out = import_hazard_curves(f)
            print 'Added output id=%d of type %s; hazard_calculation_id=%d'\
                % (out.id, out.output_type, out.oq_job.hazard_calculation.id)
    elif args.list_imported_outputs:
        list_imported_outputs()
    elif args.delete_uncompleted_calculations:
        delete_uncompleted_calculations()
    elif args.save_hazard_calculation:
        save_hazards.main(*args.save_hazard_calculation)
    elif args.load_hazard_calculation:
        hc_ids = load_hazards.hazard_load(
            models.getcursor('admin').connection,
            args.load_hazard_calculation)
        print "Load hazard calculation with IDs: %s" % hc_ids
    else:
        # no recognized action: remind the user of the options
        arg_parser.print_usage()