def test_successful_job_lifecycle(self):
    """A normal run must take the calculation from 'pending' through
    'running' to 'succeeded', invoking _launch_job exactly once."""

    def assert_running_then_succeed(*args):
        # Stands in for engine._launch_job: by the time the launcher
        # runs, the calculation must have moved to 'running'.
        self.assertEqual('running', self._calculation_status())
        return []

    def capture_job(*args, **kwargs):
        # Stands in for engine._job_from_file: a freshly created job
        # must still be 'pending'.
        self.job = self.job_from_file(*args, **kwargs)
        self.assertEqual('pending', self._calculation_status())
        return self.job

    original_launch = engine._launch_job
    try:
        engine._launch_job = mock.Mock(
            side_effect=assert_running_then_succeed)
        with patch('openquake.engine._job_from_file') as from_file:
            from_file.side_effect = capture_job
            with patch('os.fork', mocksignature=False) as fork:
                # Pretend to be the child process so no real fork happens.
                fork.return_value = 0
                engine.run_job(self.job, self.params, self.sections)
                self.assertEqual(1, engine._launch_job.call_count)
                self.assertEqual('succeeded', self._calculation_status())
    finally:
        # Always restore the real launcher for the other tests.
        engine._launch_job = original_launch
def main(): arg_parser = set_up_arg_parser() args = arg_parser.parse_args() if args.version: print utils_version.info(__version__) elif args.config_file is not None: from openquake import job from openquake import engine try: if args.log_file is not None: # Capture logging messages to a file. try: _touch_log_file(args.log_file) except IOError as e: raise IOError("Error writing to log file %s: %s" % (args.log_file, e.strerror)) user_name = getpass.getuser() ajob = engine.prepare_job(user_name) _, params, sections = engine.import_job_profile(args.config_file, ajob, user_name, args.force_inputs) engine.run_job( ajob, params, sections, output_type=args.output_type, log_level=args.log_level, force_inputs=args.force_inputs, log_file=args.log_file, ) except job.config.ValidationException as e: print str(e) except IOError as e: print str(e) except Exception as e: raise elif args.list_calculations: list_calculations() elif args.list_outputs is not None: list_outputs(args.list_outputs) elif args.export is not None: output_id, target_dir = args.export output_id = int(output_id) do_export(output_id, target_dir) else: arg_parser.print_usage()
def main(): arg_parser = set_up_arg_parser() args = arg_parser.parse_args() if args.version: print utils_version.info(__version__) elif args.config_file is not None: from openquake import job from openquake import engine try: if args.log_file is not None: # Capture logging messages to a file. try: _touch_log_file(args.log_file) except IOError as e: raise IOError('Error writing to log file %s: %s' % (args.log_file, e.strerror)) user_name = getpass.getuser() ajob = engine.prepare_job(user_name) _, params, sections = engine.import_job_profile( args.config_file, ajob, user_name, args.force_inputs) engine.run_job(ajob, params, sections, output_type=args.output_type, log_level=args.log_level, force_inputs=args.force_inputs, log_file=args.log_file) except job.config.ValidationException as e: print str(e) except IOError as e: print str(e) except Exception as e: raise elif args.list_calculations: list_calculations() elif args.list_outputs is not None: list_outputs(args.list_outputs) elif args.export is not None: output_id, target_dir = args.export output_id = int(output_id) do_export(output_id, target_dir) else: arg_parser.print_usage()
def test_run_job_deletes_job_counters(self):
    """Ensure :function:`openquake.utils.stats.delete_job_counters`
    is called exactly once by run_job."""
    cfg_path = helpers.demo_file('HazardMapTest/config.gem')
    job_profile, params, sections = engine.import_job_profile(
        cfg_path, self.job)
    # We don't want any of the supervisor/executor forking to happen; it's
    # not necessary. Also, forking should not happen in the context of a
    # test run.
    with helpers.patch('os.fork', mocksignature=False) as fork_mock:
        # Fake return val for fork:
        fork_mock.return_value = 0
        # And we don't actually want to run the job.
        with helpers.patch('openquake.engine._launch_job'):
            with helpers.patch(
                    'openquake.utils.stats.delete_job_counters') as djc_mock:
                engine.run_job(self.job, params, sections)
                # assertEqual, not the deprecated assertEquals alias
                # (matches the rest of the test suite).
                self.assertEqual(1, djc_mock.call_count)
def test_run_job_deletes_job_counters(self):
    """run_job must invoke stats.delete_job_counters exactly once."""
    cfg_path = helpers.demo_file('HazardMapTest/config.gem')
    job_profile, params, sections = engine.import_job_profile(
        cfg_path, self.job)
    # Suppress the supervisor/executor fork: it is unnecessary here and
    # forking must not happen inside a test run.
    with helpers.patch('os.fork', mocksignature=False) as fork_patch:
        fork_patch.return_value = 0  # pretend we are the child process
        # Stub out the actual job execution as well.
        with helpers.patch('openquake.engine._launch_job'):
            with helpers.patch(
                    'openquake.utils.stats.delete_job_counters') as counters:
                engine.run_job(self.job, params, sections)
                self.assertEqual(1, counters.call_count)
def test_supervisor_is_spawned(self):
    """run_job must fork and hand the child PID and job id to
    supervisor.supervise."""
    with patch("openquake.engine._job_from_file"):
        saved_launch = engine._launch_job
        try:
            engine._launch_job = mock.Mock()
            with patch("os.fork", mocksignature=False) as fork:
                def first_fork_returns_child_pid():
                    # Any subsequent fork() call pretends to be the child.
                    fork.side_effect = lambda: 0
                    return 1234
                fork.side_effect = first_fork_returns_child_pid
                supervise_path = "openquake.supervising.supervisor.supervise"
                with patch(supervise_path) as supervise_mock:
                    engine.run_job(self.job, self.params, self.sections)
                    job = models.OqJob.objects.latest(
                        field_name="last_update")
                    self.assertEqual(1, supervise_mock.call_count)
                    self.assertEqual(((1234, job.id), {}),
                                     supervise_mock.call_args)
        finally:
            # Restore the real launcher for the other tests.
            engine._launch_job = saved_launch
def test_supervisor_is_spawned(self):
    """run_job must fork once, then invoke supervisor.supervise with
    the child PID, the job id and a log_file keyword of None."""
    with patch('openquake.engine._job_from_file'):
        launch_backup = engine._launch_job
        try:
            engine._launch_job = mock.Mock()
            with patch('os.fork', mocksignature=False) as fork:
                def pretend_parent_then_child():
                    # First fork() returns the fake child PID (parent
                    # view); later calls return 0 (child view).
                    fork.side_effect = lambda: 0
                    return 1234
                fork.side_effect = pretend_parent_then_child
                target = 'openquake.supervising.supervisor.supervise'
                with patch(target) as sv:
                    engine.run_job(self.job, self.params, self.sections)
                    job = models.OqJob.objects.latest(
                        field_name='last_update')
                    self.assertEqual(1, sv.call_count)
                    self.assertEqual(
                        ((1234, job.id), {'log_file': None}),
                        sv.call_args)
        finally:
            # Restore the real launcher for the other tests.
            engine._launch_job = launch_backup
def main(): arg_parser = set_up_arg_parser() args = arg_parser.parse_args() if args.version: print __version__ sys.exit(0) if args.config_file: os.environ[config.OQ_CONFIG_FILE_VAR] = \ abspath(expanduser(args.config_file)) config.refresh() if args.no_distribute: os.environ[openquake.engine.NO_DISTRIBUTE_VAR] = '1' if args.upgrade_db: logging.basicConfig(level=logging.INFO) logs.set_level('info') conn = models.getcursor('admin').connection msg = upgrade_manager.what_if_I_upgrade( conn, extract_scripts='read_scripts') print msg if msg.startswith('Your database is already updated'): pass elif args.yes or confirm('Proceed? (y/n) '): upgrade_manager.upgrade_db(conn) sys.exit(0) if args.version_db: conn = models.getcursor('admin').connection print upgrade_manager.version_db(conn) sys.exit(0) if args.what_if_I_upgrade: conn = models.getcursor('admin').connection print upgrade_manager.what_if_I_upgrade(conn) sys.exit(0) if args.list_inputs: list_inputs(args.list_inputs) # hazard elif args.list_hazard_calculations: list_calculations(models.OqJob.objects) elif args.list_hazard_outputs is not None: engine.list_hazard_outputs(args.list_hazard_outputs) elif args.export_hazard is not None: output_id, target_dir = args.export_hazard output_id = int(output_id) export_hazard(output_id, expanduser(target_dir), args.export_type) elif args.export_hazard_outputs is not None: hc_id, target_dir = args.export_hazard_outputs export_hazard_outputs(int(hc_id), expanduser(target_dir), args.export_type) elif args.run_hazard is not None: log_file = expanduser(args.log_file) \ if args.log_file is not None else None engine.run_job(expanduser(args.run_hazard), args.log_level, log_file, args.exports) elif args.delete_hazard_calculation is not None: del_haz_calc(args.delete_hazard_calculation, args.yes) # risk elif args.list_risk_calculations: list_calculations(models.RiskCalculation.objects) elif args.list_risk_outputs is not None: engine.list_risk_outputs(args.list_risk_outputs) elif 
args.export_risk is not None: output_id, target_dir = args.export_risk export_risk(output_id, expanduser(target_dir), args.export_type) elif args.export_risk_outputs is not None: rc_id, target_dir = args.export_risk_outputs export_risk_outputs(int(rc_id), expanduser(target_dir), args.export_type) elif args.run_risk is not None: if (args.hazard_output_id is None and args.hazard_calculation_id is None): sys.exit(MISSING_HAZARD_MSG) log_file = expanduser(args.log_file) \ if args.log_file is not None else None engine.run_job(expanduser(args.run_risk), args.log_level, log_file, args.exports, hazard_output_id=args.hazard_output_id, hazard_calculation_id=args.hazard_calculation_id) elif args.delete_risk_calculation is not None: del_risk_calc(args.delete_risk_calculation, args.yes) # import elif args.load_gmf is not None: with open(args.load_gmf) as f: out = import_gmf_scenario(f) print 'Added output id=%d of type %s; hazard_calculation_id=%d'\ % (out.id, out.output_type, out.oq_job.id) elif args.load_curve is not None: with open(args.load_curve) as f: out = import_hazard_curves(f) print 'Added output id=%d of type %s; hazard_calculation_id=%d'\ % (out.id, out.output_type, out.oq_job.id) elif args.list_imported_outputs: list_imported_outputs() elif args.delete_uncompleted_calculations: delete_uncompleted_calculations() elif args.save_hazard_calculation: save_hazards.main(*args.save_hazard_calculation) elif args.load_hazard_calculation: hc_ids = load_hazards.hazard_load( models.getcursor('admin').connection, args.load_hazard_calculation) print "Load hazard calculation with IDs: %s" % hc_ids else: arg_parser.print_usage()
def main(): arg_parser = set_up_arg_parser() args = arg_parser.parse_args() exports = args.exports or 'xml,csv' if args.version: print __version__ sys.exit(0) if args.run or args.run_hazard or args.run_risk: # the logging will be configured in engine.py pass else: # configure a basic logging logging.basicConfig(level=logging.INFO) if args.config_file: os.environ[config.OQ_CONFIG_FILE_VAR] = \ abspath(expanduser(args.config_file)) config.refresh() if args.no_distribute: os.environ[openquake.engine.NO_DISTRIBUTE_VAR] = '1' if args.make_html_report: conn = models.getcursor('admin').connection print 'Written', make_report(conn, args.make_html_report) sys.exit(0) if args.upgrade_db: logs.set_level('info') conn = models.getcursor('admin').connection msg = upgrade_manager.what_if_I_upgrade(conn, extract_scripts='read_scripts') print msg if msg.startswith('Your database is already updated'): pass elif args.yes or confirm('Proceed? (y/n) '): upgrade_manager.upgrade_db(conn) sys.exit(0) if args.version_db: conn = models.getcursor('admin').connection print upgrade_manager.version_db(conn) sys.exit(0) if args.what_if_I_upgrade: conn = models.getcursor('admin').connection print upgrade_manager.what_if_I_upgrade(conn) sys.exit(0) if args.list_inputs: list_inputs(args.list_inputs) # hazard or hazard+risk elif args.run: job_inis = map(expanduser, args.run.split(',')) if len(job_inis) not in (1, 2): sys.exit('%s should be a .ini filename or a pair of filenames ' 'separated by a comma' % args.run) for job_ini in job_inis: open(job_ini).read() # raise an IOError if the file does not exist log_file = expanduser(args.log_file) \ if args.log_file is not None else None if args.lite: # run hazard and risk together engine.run_job_lite(job_inis, args.log_level, log_file, args.exports) else: # run hazard job = engine.run_job(job_inis[0], args.log_level, log_file, args.exports) # run risk if len(job_inis) == 2: engine.run_job(job_inis[1], args.log_level, log_file, args.exports, 
hazard_calculation_id=job.id) # hazard elif args.list_hazard_calculations: list_calculations('hazard') elif args.run_hazard is not None: log_file = expanduser(args.log_file) \ if args.log_file is not None else None engine.run_job(expanduser(args.run_hazard), args.log_level, log_file, args.exports) elif args.delete_hazard_calculation is not None: del_calc(args.delete_hazard_calculation, args.yes) # risk elif args.list_risk_calculations: list_calculations('risk') elif args.run_risk is not None: if (args.hazard_output_id is None and args.hazard_calculation_id is None): sys.exit(MISSING_HAZARD_MSG) log_file = expanduser(args.log_file) \ if args.log_file is not None else None engine.run_job(expanduser(args.run_risk), args.log_level, log_file, args.exports, hazard_output_id=args.hazard_output_id, hazard_calculation_id=args.hazard_calculation_id) elif args.delete_risk_calculation is not None: del_calc(args.delete_risk_calculation, args.yes) # export elif args.list_outputs is not None: engine.list_outputs(args.list_outputs) elif args.list_hazard_outputs is not None: deprecate('--list-hazard-outputs', '--list-outputs') engine.list_outputs(args.list_hazard_outputs) elif args.list_risk_outputs is not None: deprecate('--list-risk-outputs', '--list-outputs') engine.list_outputs(args.list_risk_outputs) elif args.export_output is not None: output_id, target_dir = args.export_output export(int(output_id), expanduser(target_dir), exports) elif args.export_hazard_output is not None: deprecate('--export-hazard-output', '--export-output') output_id, target_dir = args.export_hazard_output export(int(output_id), expanduser(target_dir), exports) elif args.export_risk_output is not None: deprecate('--export-hazard-output', '--export-output') output_id, target_dir = args.export_risk_output export(int(output_id), expanduser(target_dir), exports) elif args.export_outputs is not None: job_id, target_dir = args.export_outputs export_outputs(int(job_id), expanduser(target_dir), exports) elif 
args.export_stats is not None: job_id, target_dir, output_type = args.export_stats export_stats(int(job_id), expanduser(target_dir), output_type, exports) # deprecated elif args.export_hazard_outputs is not None: deprecate('--export-hazard-outputs', '--export-outputs') job_id, target_dir = args.export_hazard_outputs export_outputs(int(job_id), expanduser(target_dir), exports) elif args.export_risk_outputs is not None: deprecate('--export-risk-outputs', '--export-outputs') job_id, target_dir = args.export_risk_outputs export_outputs(int(job_id), expanduser(target_dir), exports) # import elif args.load_gmf is not None: with open(args.load_gmf) as f: out = import_gmf_scenario(f) print 'Added output id=%d of type %s; hazard_calculation_id=%d'\ % (out.id, out.output_type, out.oq_job.id) elif args.load_curve is not None: with open(args.load_curve) as f: out = import_hazard_curves(f) print 'Added output id=%d of type %s; hazard_calculation_id=%d'\ % (out.id, out.output_type, out.oq_job.id) elif args.list_imported_outputs: list_imported_outputs() elif args.delete_uncompleted_calculations: delete_uncompleted_calculations() elif args.save_hazard_calculation: save_hazards.main(*args.save_hazard_calculation) elif args.load_hazard_calculation: job_ids = load_hazards.hazard_load( models.getcursor('admin').connection, args.load_hazard_calculation) print "Load hazard calculation with IDs: %s" % job_ids else: arg_parser.print_usage()
def main(): arg_parser = set_up_arg_parser() args = arg_parser.parse_args() exports = args.exports or 'xml,csv' if args.version: print __version__ sys.exit(0) if args.run or args.run_hazard or args.run_risk: # the logging will be configured in engine.py pass else: # configure a basic logging logging.basicConfig(level=logging.INFO) if args.config_file: os.environ[config.OQ_CONFIG_FILE_VAR] = \ abspath(expanduser(args.config_file)) config.refresh() if args.no_distribute: os.environ[openquake.engine.NO_DISTRIBUTE_VAR] = '1' if args.make_html_report: conn = models.getcursor('admin').connection print 'Written', make_report(conn, args.make_html_report) sys.exit(0) if args.upgrade_db: logs.set_level('info') conn = models.getcursor('admin').connection msg = upgrade_manager.what_if_I_upgrade( conn, extract_scripts='read_scripts') print msg if msg.startswith('Your database is already updated'): pass elif args.yes or confirm('Proceed? (y/n) '): upgrade_manager.upgrade_db(conn) sys.exit(0) if args.version_db: conn = models.getcursor('admin').connection print upgrade_manager.version_db(conn) sys.exit(0) if args.what_if_I_upgrade: conn = models.getcursor('admin').connection print upgrade_manager.what_if_I_upgrade(conn) sys.exit(0) if args.list_inputs: list_inputs(args.list_inputs) # hazard or hazard+risk elif args.run: job_inis = map(expanduser, args.run.split(',')) if len(job_inis) not in (1, 2): sys.exit('%s should be a .ini filename or a pair of filenames ' 'separated by a comma' % args.run) for job_ini in job_inis: open(job_ini).read() # raise an IOError if the file does not exist log_file = expanduser(args.log_file) \ if args.log_file is not None else None if args.lite: # run hazard and risk together engine.run_job_lite(job_inis, args.log_level, log_file, args.exports) else: # run hazard job = engine.run_job(job_inis[0], args.log_level, log_file, args.exports) # run risk if len(job_inis) == 2: engine.run_job(job_inis[1], args.log_level, log_file, args.exports, 
hazard_calculation_id=job.id) # hazard elif args.list_hazard_calculations: list_calculations('hazard') elif args.run_hazard is not None: log_file = expanduser(args.log_file) \ if args.log_file is not None else None engine.run_job(expanduser(args.run_hazard), args.log_level, log_file, args.exports) elif args.delete_hazard_calculation is not None: del_calc(args.delete_hazard_calculation, args.yes) # risk elif args.list_risk_calculations: list_calculations('risk') elif args.run_risk is not None: if (args.hazard_output_id is None and args.hazard_calculation_id is None): sys.exit(MISSING_HAZARD_MSG) log_file = expanduser(args.log_file) \ if args.log_file is not None else None engine.run_job( expanduser(args.run_risk), args.log_level, log_file, args.exports, hazard_output_id=args.hazard_output_id, hazard_calculation_id=args.hazard_calculation_id) elif args.delete_risk_calculation is not None: del_calc(args.delete_risk_calculation, args.yes) # export elif args.list_outputs is not None: engine.list_outputs(args.list_outputs) elif args.list_hazard_outputs is not None: deprecate('--list-hazard-outputs', '--list-outputs') engine.list_outputs(args.list_hazard_outputs) elif args.list_risk_outputs is not None: deprecate('--list-risk-outputs', '--list-outputs') engine.list_outputs(args.list_risk_outputs) elif args.export_output is not None: output_id, target_dir = args.export_output export(int(output_id), expanduser(target_dir), exports) elif args.export_hazard_output is not None: deprecate('--export-hazard-output', '--export-output') output_id, target_dir = args.export_hazard_output export(int(output_id), expanduser(target_dir), exports) elif args.export_risk_output is not None: deprecate('--export-hazard-output', '--export-output') output_id, target_dir = args.export_risk_output export(int(output_id), expanduser(target_dir), exports) elif args.export_outputs is not None: job_id, target_dir = args.export_outputs export_outputs(int(job_id), expanduser(target_dir), exports) elif 
args.export_stats is not None: job_id, target_dir, output_type = args.export_stats export_stats(int(job_id), expanduser(target_dir), output_type, exports) # deprecated elif args.export_hazard_outputs is not None: deprecate('--export-hazard-outputs', '--export-outputs') job_id, target_dir = args.export_hazard_outputs export_outputs(int(job_id), expanduser(target_dir), exports) elif args.export_risk_outputs is not None: deprecate('--export-risk-outputs', '--export-outputs') job_id, target_dir = args.export_risk_outputs export_outputs(int(job_id), expanduser(target_dir), exports) # import elif args.load_gmf is not None: with open(args.load_gmf) as f: out = import_gmf_scenario(f) print 'Added output id=%d of type %s; hazard_calculation_id=%d'\ % (out.id, out.output_type, out.oq_job.id) elif args.load_curve is not None: with open(args.load_curve) as f: out = import_hazard_curves(f) print 'Added output id=%d of type %s; hazard_calculation_id=%d'\ % (out.id, out.output_type, out.oq_job.id) elif args.list_imported_outputs: list_imported_outputs() elif args.delete_uncompleted_calculations: delete_uncompleted_calculations() elif args.save_hazard_calculation: save_hazards.main(*args.save_hazard_calculation) elif args.load_hazard_calculation: job_ids = load_hazards.hazard_load( models.getcursor('admin').connection, args.load_hazard_calculation) print "Load hazard calculation with IDs: %s" % job_ids else: arg_parser.print_usage()
def main(): arg_parser = set_up_arg_parser() args = arg_parser.parse_args() if args.version: print __version__ sys.exit(0) if args.config_file: os.environ[config.OQ_CONFIG_FILE_VAR] = \ abspath(expanduser(args.config_file)) config.refresh() if args.no_distribute: os.environ[openquake.engine.NO_DISTRIBUTE_VAR] = '1' if args.list_inputs: list_inputs(args.list_inputs) # hazard elif args.list_hazard_calculations: list_calculations(models.HazardCalculation.objects) elif args.list_hazard_outputs is not None: engine.list_hazard_outputs(args.list_hazard_outputs) elif args.export_hazard is not None: output_id, target_dir = args.export_hazard output_id = int(output_id) export_hazard(output_id, expanduser(target_dir), args.export_type) elif args.export_hazard_outputs is not None: hc_id, target_dir = args.export_hazard_outputs export_hazard_outputs(int(hc_id), expanduser(target_dir), args.export_type) elif args.run_hazard is not None: log_file = expanduser(args.log_file) \ if args.log_file is not None else None engine.run_job(expanduser(args.run_hazard), args.log_level, log_file, args.exports) elif args.delete_hazard_calculation is not None: del_haz_calc(args.delete_hazard_calculation, args.yes) # risk elif args.list_risk_calculations: list_calculations(models.RiskCalculation.objects) elif args.list_risk_outputs is not None: engine.list_risk_outputs(args.list_risk_outputs) elif args.export_risk is not None: output_id, target_dir = args.export_risk export_risk(output_id, expanduser(target_dir), args.export_type) elif args.export_risk_outputs is not None: rc_id, target_dir = args.export_risk_outputs export_risk_outputs(int(rc_id), expanduser(target_dir), args.export_type) elif args.run_risk is not None: if (args.hazard_output_id is None and args.hazard_calculation_id is None): sys.exit(MISSING_HAZARD_MSG) log_file = expanduser(args.log_file) \ if args.log_file is not None else None engine.run_job(expanduser(args.run_risk), args.log_level, log_file, args.exports, 
hazard_output_id=args.hazard_output_id, hazard_calculation_id=args.hazard_calculation_id) elif args.delete_risk_calculation is not None: del_risk_calc(args.delete_risk_calculation, args.yes) # import elif args.load_gmf is not None: with open(args.load_gmf) as f: out = import_gmf_scenario(f) print 'Added output id=%d of type %s; hazard_calculation_id=%d'\ % (out.id, out.output_type, out.oq_job.hazard_calculation.id) elif args.load_curve is not None: with open(args.load_curve) as f: out = import_hazard_curves(f) print 'Added output id=%d of type %s; hazard_calculation_id=%d'\ % (out.id, out.output_type, out.oq_job.hazard_calculation.id) elif args.list_imported_outputs: list_imported_outputs() elif args.delete_uncompleted_calculations: delete_uncompleted_calculations() elif args.save_hazard_calculation: save_hazards.main(*args.save_hazard_calculation) elif args.load_hazard_calculation: hc_ids = load_hazards.hazard_load( models.getcursor('admin').connection, args.load_hazard_calculation) print "Load hazard calculation with IDs: %s" % hc_ids else: arg_parser.print_usage()