def zip_job(job_ini, archive_zip='', risk_ini='', oq=None, log=logging.info):
    """
    Zip the given job.ini file into the given archive, together with all
    related files.
    """
    if not os.path.exists(job_ini):
        sys.exit('%s does not exist' % job_ini)
    archive_zip = archive_zip or 'job.zip'
    if isinstance(archive_zip, str):  # actually it should be path-like
        if not archive_zip.endswith('.zip'):
            sys.exit('%s does not end with .zip' % archive_zip)
        if os.path.exists(archive_zip):
            sys.exit('%s exists already' % archive_zip)
    # do not validate to avoid permissions error on the export_dir
    oq = oq or oqvalidation.OqParam(**readinput.get_params(job_ini))
    if risk_ini:
        # merge the risk inputs into the hazard parameters
        risk_ini = os.path.normpath(os.path.abspath(risk_ini))
        risk_oq = readinput.get_oqparam(risk_ini)
        del risk_oq.inputs['job_ini']
        oq.inputs.update(risk_oq.inputs)
        oq.shakemap_uri.update(risk_oq.shakemap_uri)
        to_zip = [risk_ini] + readinput.get_input_files(oq)
    else:
        to_zip = readinput.get_input_files(oq)
    return general.zipfiles(to_zip, archive_zip, log=log)
def create_jobs(job_inis, loglvl, kw):
    """
    Create job records on the database (if not already there) and configure
    the logging.
    """
    out = []
    for job_ini in job_inis:
        if isinstance(job_ini, dict):
            dic = job_ini
        else:
            # NB: `get_params` must NOT log, since the logging is not
            # configured yet, otherwise the log will disappear :-(
            dic = readinput.get_params(job_ini, kw)
        if 'sensitivity_analysis' not in dic:
            _init_logs(dic, loglvl)
            out.append(dic)
            continue
        # expand the job into one job per combination of parameters
        analysis = valid.dictionary(dic['sensitivity_analysis'])
        for values in itertools.product(*analysis.values()):
            new = dic.copy()
            _init_logs(new, loglvl)
            if '_job_id' in dic:
                # do not propagate a pre-assigned job id to the next copies
                del dic['_job_id']
            pars = dict(zip(analysis, values))
            for param, value in pars.items():
                new[param] = str(value)
            new['description'] = '%s %s' % (new['description'], pars)
            logging.info('Job with %s', pars)
            out.append(new)
    return out
def _run(job_ini, concurrent_tasks, pdb, reuse_input, loglevel, exports,
         params):
    """
    Run a single job.ini and return the finished calculator object.
    """
    global calc_path
    if 'hazard_calculation_id' in params:
        hc_id = int(params['hazard_calculation_id'])
        if hc_id >= 0:
            params['hazard_calculation_id'] = hc_id
        else:  # interpret negative calculation ids
            calc_ids = datastore.get_calc_ids()
            try:
                params['hazard_calculation_id'] = calc_ids[hc_id]
            except IndexError:
                raise SystemExit('There are %d old calculations, cannot '
                                 'retrieve the %s' % (len(calc_ids), hc_id))
    dic = readinput.get_params(job_ini, params)
    # set the logs first of all
    log = logs.init("job", dic, getattr(logging, loglevel.upper()))
    # disable gzip_input
    base.BaseCalculator.gzip_inputs = lambda self: None
    with log, performance.Monitor('total runtime', measuremem=True) as monitor:
        calc = base.calculators(log.get_oqparam(), log.calc_id)
        if reuse_input:  # enable caching
            calc.oqparam.cachedir = datastore.get_datadir()
        calc.run(concurrent_tasks=concurrent_tasks, pdb=pdb, exports=exports)
    logging.info('Total time spent: %s s', monitor.duration)
    logging.info('Memory allocated: %s', general.humansize(monitor.mem))
    print('See the output with silx view %s' % calc.datastore.filename)
    calc_path, _ = os.path.splitext(calc.datastore.filename)  # used below
    return calc
def __init__(self, job_ini, calc_id, log_level='info', log_file=None,
             user_name=None, hc_id=None):
    """
    Initialize the job context.

    :param job_ini: path to a job.ini file or a dictionary of parameters
    :param calc_id: 0 to create a new job record, -1 to use the next
        local id (single-user mode only), otherwise an existing calc_id
    :param log_level: logging level name (default 'info')
    :param log_file: optional path to a log file
    :param user_name: owner of the job; defaults to the current OS user
    :param hc_id: optional id of a previous hazard calculation
    """
    self.log_level = log_level
    self.log_file = log_file
    self.user_name = user_name or getpass.getuser()
    if isinstance(job_ini, dict):  # dictionary of parameters
        self.params = job_ini
    else:  # path to job.ini file
        self.params = readinput.get_params(job_ini)
    if hc_id:
        self.params['hazard_calculation_id'] = hc_id
    if calc_id == 0:
        # BUG FIX: pass self.user_name (which defaults to the current OS
        # user) instead of the raw user_name argument, which may be None
        self.calc_id = dbcmd('create_job', get_datadir(),
                             self.params['calculation_mode'],
                             self.params['description'],
                             self.user_name, hc_id)
    elif calc_id == -1:
        # only works in single-user situations
        self.calc_id = get_last_calc_id() + 1
    else:
        # assume the calc_id was already created in the db
        self.calc_id = calc_id
def job_from_file_lite(cfg_file, username, log_level='info', exports='',
                       **extras):
    """
    Create a full job profile from a job config file.

    :param str cfg_file: Path to the job.ini files.
    :param str username: The user who will own this job profile and
        all results.
    :param str log_level: Desired log level.
    :param exports: Comma-separated string of desired export types.
    :params extras: Extra parameters (used only in the tests to override
        the params)
    :returns: :class:`openquake.engine.db.models.OqJob` object
    :raises: `RuntimeError` if the input job configuration is not valid
    """
    from openquake.commonlib.calculators import base
    # create the current job
    job = create_job(user_name=username, log_level=log_level)
    models.JobStats.objects.create(oq_job=job)
    with logs.handle(job, log_level):
        # read calculation params and create the calculation profile
        job_params = readinput.get_params([cfg_file])
        job_params.update(extras)
        # build and validate an OqParam object
        oqparam = readinput.get_oqparam(
            job_params, calculators=base.calculators)
        job.save_params(vars(oqparam))
        job.save()
    return job
def zip(job_ini, archive_zip, risk_ini, oq=None, log=logging.info):
    """
    Zip the given job.ini file into the given archive, together with all
    related files.
    """
    if not os.path.exists(job_ini):
        sys.exit('%s does not exist' % job_ini)
    if isinstance(archive_zip, str):  # actually it should be path-like
        if not archive_zip.endswith('.zip'):
            sys.exit('%s does not end with .zip' % archive_zip)
        if os.path.exists(archive_zip):
            sys.exit('%s exists already' % archive_zip)
    logging.basicConfig(level=logging.INFO)
    # do not validate to avoid permissions error on the export_dir
    oq = oq or readinput.get_oqparam(job_ini, validate=False)
    collected = set()
    if risk_ini:
        risk_ini = os.path.normpath(os.path.abspath(risk_ini))
        oq.inputs.update(readinput.get_params([risk_ini])['inputs'])
    collected.add(os.path.normpath(os.path.abspath(job_ini)))
    # collect .hdf5 tables for the GSIMs, if any
    if 'gsim_logic_tree' in oq.inputs or oq.gsim:
        gsim_lt = readinput.get_gsim_lt(oq)
        for gsims in gsim_lt.values.values():
            for gsim in gsims:
                table = getattr(gsim, 'GMPE_TABLE', None)
                if table:
                    collected.add(table)
    # collect exposure.csv, if any
    exposure_xml = oq.inputs.get('exposure')
    if exposure_xml:
        dname = os.path.dirname(exposure_xml)
        expo = nrml.read(exposure_xml, stop='asset')[0]
        if not expo.assets:
            # the exposure node text lists the CSV file names
            exposure_csv = (~expo.assets).strip()
            for csv in exposure_csv.split():
                if csv and os.path.exists(os.path.join(dname, csv)):
                    collected.add(os.path.join(dname, csv))
    # collect the .hdf5 UCERF file, if any
    if oq.calculation_mode.startswith('ucerf_'):
        sm = nrml.read(oq.inputs['source_model'])
        fname = sm.sourceModel.UCERFSource['filename']
        f = os.path.join(os.path.dirname(oq.inputs['source_model']), fname)
        collected.add(os.path.normpath(f))
    # collect all other files
    for key, fname in oq.inputs.items():
        if isinstance(fname, list):
            collected.update(os.path.normpath(f) for f in fname)
        elif isinstance(fname, dict):
            collected.update(os.path.normpath(f) for f in fname.values())
        else:
            collected.add(os.path.normpath(fname))
    general.zipfiles(collected, archive_zip, log=log)
def test_no_absolute_path(self):
    # an absolute path inside a job.ini must be rejected
    tmpdir = tempfile.mkdtemp()
    site_model_input = general.gettemp(dir=tmpdir, content="foo")
    job_config = general.gettemp(dir=tmpdir, content="""
[general]
calculation_mode = event_based
[foo]
bar = baz
[site]
sites = 0 0
site_model_file = %s
maximum_distance=1
truncation_level=0
random_seed=0
intensity_measure_types = PGA
investigation_time = 50
export_dir = %s
""" % (site_model_input, TMP))
    with self.assertRaises(ValueError) as exc:
        readinput.get_params([job_config])
    self.assertIn('is an absolute path', str(exc.exception))
def test_no_absolute_path(self):
    # an absolute path inside a job.ini must be rejected
    tmpdir = tempfile.mkdtemp()
    site_model_input = general.writetmp(dir=tmpdir, content="foo")
    job_config = general.writetmp(dir=tmpdir, content="""
[general]
calculation_mode = event_based
[foo]
bar = baz
[site]
sites = 0 0
site_model_file = %s
maximum_distance=1
truncation_level=0
random_seed=0
intensity_measure_types = PGA
investigation_time = 50
export_dir = %s
""" % (site_model_input, TMP))
    with self.assertRaises(ValueError) as exc:
        readinput.get_params([job_config])
    self.assertIn('is an absolute path', str(exc.exception))
def get_calc(self, testfile, job_ini, **kw):
    """
    Return the outputs of the calculation as a dictionary
    """
    if os.path.isfile(testfile):
        self.testdir = os.path.dirname(testfile)
    else:
        self.testdir = testfile
    params = readinput.get_params(
        [os.path.join(self.testdir, ini) for ini in job_ini.split(",")])
    params.update(kw)
    oq = oqvalidation.OqParam(**params)
    oq.validate()
    # change this when debugging the test
    monitor = PerformanceMonitor(self.testdir)
    return base.calculators(oq, monitor)
def get_calc(self, testfile, job_ini, **kw):
    """
    Return the outputs of the calculation as a dictionary
    """
    if os.path.isfile(testfile):
        self.testdir = os.path.dirname(testfile)
    else:
        self.testdir = testfile
    params = readinput.get_params(os.path.join(self.testdir, job_ini), kw)
    # accept every calculator registered in `base`
    oqvalidation.OqParam.calculation_mode.validator.choices = tuple(
        base.calculators)
    oq = oqvalidation.OqParam(**params)
    oq.validate()
    # change this when debugging the test
    log = logs.init('calc', params)
    return base.calculators(oq, log.calc_id)
def get_calc(self, testfile, job_ini, **kw):
    """
    Return the outputs of the calculation as a dictionary
    """
    if os.path.isfile(testfile):
        self.testdir = os.path.dirname(testfile)
    else:
        self.testdir = testfile
    params = readinput.get_params(
        [os.path.join(self.testdir, ini) for ini in job_ini.split(',')])
    params.update(kw)
    oq = oqvalidation.OqParam(**params)
    oq.validate()
    # change this when debugging the test
    monitor = Monitor(self.testdir)
    return base.calculators(oq, monitor)
def submit_job(job_ini, username, **kw):
    """
    Create a job object from the given job.ini file in the job directory
    and run it in a new process. Returns a PID.
    """
    # errors in validating oqparam are reported immediately
    params = readinput.get_params(job_ini)
    job_id = logs.init('job')
    params['_job_id'] = job_id
    run_args = ([params], config.distribution.log_level, None, '', username)
    proc = Process(target=engine.run_jobs, args=run_args, kwargs=kw)
    proc.start()
    return job_id
def get_calc(self, testfile, job_ini, **kw):
    """
    Return the outputs of the calculation as a dictionary
    """
    if os.path.isfile(testfile):
        self.testdir = os.path.dirname(testfile)
    else:
        self.testdir = testfile
    inis = [os.path.join(self.testdir, ini) for ini in job_ini.split(',')]
    params = readinput.get_params(inis, **kw)
    # accept every calculator registered in `base`
    oqvalidation.OqParam.calculation_mode.validator.choices = tuple(
        base.calculators)
    oq = oqvalidation.OqParam(**params)
    oq.validate()  # change this when debugging the test
    return base.calculators(oq)
def main(job_ini_or_zip_or_nrmls):
    """
    Check the validity of job.ini files, job.zip files and .xml files.
    NB: `oq check_input job_haz.ini job_risk.ini` is special-cased so that
    the risk files are checked before the hazard files.
    """
    if os.environ.get('OQ_DISTRIBUTE') not in ('no', 'processpool'):
        os.environ['OQ_DISTRIBUTE'] = 'processpool'
    if all(f.endswith('.ini') for f in job_ini_or_zip_or_nrmls):
        # the typical case is job_haz.ini + job_risk.ini
        dic = {}
        for ini in job_ini_or_zip_or_nrmls:
            for key, val in readinput.get_params(ini).items():
                if key == 'inputs' and key in dic:
                    dic[key].update(val)
                else:  # the last wins
                    dic[key] = val
        with logs.init('job', dic) as log:
            logging.info('Running oq check_input %s',
                         ' '.join(job_ini_or_zip_or_nrmls))
            calc = base.calculators(log.get_oqparam(), log.calc_id)
            base.BaseCalculator.gzip_inputs = lambda self: None  # disable
            with mock.patch.dict(os.environ, {'OQ_CHECK_INPUT': '1'}):
                calc.read_inputs()
        return
    for fname in job_ini_or_zip_or_nrmls:
        if fname.endswith('.xml'):
            try:
                node = nrml.to_python(fname)
                if node.tag.endswith('exposureModel'):
                    err = Exposure.check(fname)
                    if err:
                        logging.warning(err)
                else:
                    logging.info('Checked %s', fname)
            except Exception as exc:
                sys.exit(exc)
        else:  # .zip
            with logs.init('job', fname) as log:
                path = os.path.abspath(fname)
                logging.info('Running oq check_input %s', path)
                calc = base.calculators(log.get_oqparam(), log.calc_id)
                base.BaseCalculator.gzip_inputs = lambda self: None  # disable
                with mock.patch.dict(os.environ, {'OQ_CHECK_INPUT': '1'}):
                    calc.read_inputs()
def create_jobs(job_inis, log_level=logging.INFO, log_file=None,
                user_name=None, hc_id=None, multi=False):
    """
    Create job records on the database.

    :returns: a list of LogContext objects
    """
    if len(job_inis) > 1 and not hc_id and not multi:
        # use the first job as hazard calculation for the others
        first = logs.init("job", job_inis[0], log_level, log_file,
                          user_name, hc_id)
        hc_id = first.calc_id
        jobs = [first]
        job_inis = job_inis[1:]
    else:
        jobs = []
    for job_ini in job_inis:
        if isinstance(job_ini, dict):
            dic = job_ini
        else:
            # NB: `get_params` must NOT log, since the logging is not
            # configured yet, otherwise the log will disappear :-(
            dic = readinput.get_params(job_ini)
        dic['hazard_calculation_id'] = hc_id
        if 'sensitivity_analysis' in dic:
            # one job per combination of sensitivity parameters
            analysis = valid.dictionary(dic['sensitivity_analysis'])
            for values in itertools.product(*analysis.values()):
                ctx = logs.init('job', dic, log_level, None, user_name, hc_id)
                pars = dict(zip(analysis, values))
                for param, value in pars.items():
                    ctx.params[param] = str(value)
                ctx.params['description'] = '%s %s' % (
                    ctx.params['description'], pars)
                ctx.params['hazard_calculation_id'] = hc_id
                logging.info('Job with %s', pars)
                jobs.append(ctx)
        else:
            jobs.append(
                logs.init('job', dic, log_level, None, user_name, hc_id))
    if multi:
        for job in jobs:
            job.multi = True
    return jobs
def get_calc(self, testfile, job_ini, **kw):
    """
    Return the outputs of the calculation as a dictionary
    """
    if os.path.isfile(testfile):
        self.testdir = os.path.dirname(testfile)
    else:
        self.testdir = testfile
    inis = [os.path.join(self.testdir, ini) for ini in job_ini.split(',')]
    # keyword arguments ending in `_file` are input overrides
    inputs = {k[:-5]: kw.pop(k) for k in list(kw) if k.endswith('_file')}
    params = readinput.get_params(inis, **inputs)
    params.update(kw)
    # accept every calculator registered in `base`
    oqvalidation.OqParam.calculation_mode.validator.choices = tuple(
        base.calculators)
    oq = oqvalidation.OqParam(**params)
    oq.validate()  # change this when debugging the test
    return base.calculators(oq, performance.Monitor(self.testdir))
def job_from_files(cfg_files, username, log_level='info', exports='',
                   **extras):
    """
    Create a full job profile from a job config file.

    :param str cfg_files_path: Path to the job.ini files.
    :param str username: The user who will own this job profile and
        all results.
    :param str log_level: Desired log level.
    :param exports: Comma-separated string of desired export types.
    :params extras: Extra parameters (used only in the tests to override
        the params)
    :returns: :class:`openquake.engine.db.models.OqJob` object
    :raises: `RuntimeError` if the input job configuration is not valid
    """
    from openquake.commonlib.calculators import base
    # create the current job
    job = create_job(user_name=username, log_level=log_level)
    models.JobStats.objects.create(oq_job=job)
    with logs.handle(job, log_level):
        # read calculation params and create the calculation profile
        job_params = readinput.get_params(cfg_files)
        job_params['hazard_output_id'] = None
        job_params['hazard_calculation_id'] = None
        job_params.update(extras)
        # build and validate an OqParam object
        oqparam = readinput.get_oqparam(
            job_params, calculators=base.calculators)
        oqparam.concurrent_tasks = int(
            config.get('celery', 'concurrent_tasks'))
        job.save_params(vars(oqparam))
        job.save()
    return job
def __init__(self, job: str, job_ini, log_level='info', log_file=None,
             user_name=None, hc_id=None):
    """
    Store the job parameters and create (or reuse) the calculation id.
    """
    self.job = job
    self.log_level = log_level
    self.log_file = log_file
    self.user_name = user_name
    if isinstance(job_ini, dict):  # dictionary of parameters
        self.params = job_ini
    else:  # path to job.ini file
        self.params = readinput.get_params(job_ini)
    self.params['hazard_calculation_id'] = hc_id
    if not job:
        # no job record on the db: compute the next local id
        self.calc_id = get_last_calc_id() + 1
    else:
        self.calc_id = dbcmd('create_job', get_datadir(),
                             self.params['calculation_mode'],
                             self.params['description'],
                             user_name, hc_id)
def zip_job(job_ini, archive_zip='', risk_ini='', oq=None, log=logging.info):
    """
    Zip the given job.ini file into the given archive, together with all
    related files.
    """
    if not os.path.exists(job_ini):
        sys.exit('%s does not exist' % job_ini)
    archive_zip = archive_zip or 'job.zip'
    if isinstance(archive_zip, str):  # actually it should be path-like
        if not archive_zip.endswith('.zip'):
            sys.exit('%s does not end with .zip' % archive_zip)
        if os.path.exists(archive_zip):
            sys.exit('%s exists already' % archive_zip)
    # do not validate to avoid permissions error on the export_dir
    oq = oq or readinput.get_oqparam(job_ini, validate=False)
    if risk_ini:
        # merge the risk inputs into the hazard parameters
        risk_ini = os.path.normpath(os.path.abspath(risk_ini))
        risk_inputs = readinput.get_params([risk_ini])['inputs']
        del risk_inputs['job_ini']
        oq.inputs.update(risk_inputs)
        fnames = [risk_ini] + readinput.get_input_files(oq)
    else:
        fnames = readinput.get_input_files(oq)
    return general.zipfiles(fnames, archive_zip, log=log)
def job_from_file(cfg_file_path, username, log_level='info', exports='',
                  hazard_output_id=None, hazard_calculation_id=None,
                  **extras):
    """
    Create a full job profile from a job config file.

    :param str cfg_file_path:
        Path to the job.ini.
    :param str username:
        The user who will own this job profile and all results.
    :param str log_level:
        Desired log level.
    :param exports:
        Comma-separated string of desired export types.
    :param int hazard_output_id:
        ID of a hazard output to use as input to this calculation. Specify
        this xor ``hazard_calculation_id``.
    :param int hazard_calculation_id:
        ID of a complete hazard job to use as input to this calculation.
        Specify this xor ``hazard_output_id``.
    :params extras:
        Extra parameters (used only in the tests to override the params)
    :returns:
        :class:`openquake.engine.db.models.OqJob` object
    :raises:
        `RuntimeError` if the input job configuration is not valid
    """
    assert os.path.exists(cfg_file_path), cfg_file_path
    from openquake.engine.calculators import calculators

    # determine the previous hazard job, if any
    if hazard_calculation_id:
        haz_job = models.OqJob.objects.get(pk=hazard_calculation_id)
    elif hazard_output_id:
        # extract the hazard job from the hazard_output_id
        haz_job = models.Output.objects.get(pk=hazard_output_id).oq_job
    else:
        haz_job = None  # no previous hazard job

    # create the current job
    job = create_job(user_name=username, log_level=log_level)
    models.JobStats.objects.create(oq_job=job)
    with logs.handle(job, log_level):
        # read calculation params and create the calculation profile
        params = readinput.get_params([cfg_file_path])
        # TODO: improve the logic before; it is very hackish we should
        # change the call in server.views.submit_job to pass the temporary dir
        if not exports:  # when called from the engine server
            # ignore the user-provided export_dir: the engine server will
            # export on demand with its own mechanism on a temporary directory
            params['export_dir'] = tempfile.gettempdir()
        params.update(extras)
        if haz_job:  # for risk calculations
            calcmode = params['calculation_mode']
            check_hazard_risk_consistency(haz_job, calcmode)
            if haz_job.user_name != username:
                logs.LOG.warn(
                    'You are using a hazard calculation ran by %s',
                    haz_job.user_name)
            if hazard_output_id and params.get('quantile_loss_curves'):
                logs.LOG.warn(
                    'quantile_loss_curves is on, but you passed a single '
                    'hazard output: the statistics will not be computed')

        # build and validate an OqParam object
        oqparam = readinput.get_oqparam(params, calculators=calculators)
        oqparam.hazard_calculation_id = \
            haz_job.id if haz_job and not hazard_output_id else None
        oqparam.hazard_output_id = hazard_output_id

        # rebuild the params dictionary from the validated OqParam object
        params = vars(oqparam).copy()
        if haz_job:
            params['hazard_calculation_id'] = haz_job.id

        if hazard_output_id is None and hazard_calculation_id is None:
            # this is a hazard calculation, not a risk one
            job.save_params(params)
            del params['hazard_calculation_id']
            del params['hazard_output_id']
        else:  # this is a risk calculation
            if 'maximum_distance' in params:
                raise NameError(
                    'The name of the parameter `maximum_distance` for risk '
                    'calculators has changed.\nIt is now '
                    '`asset_hazard_distance`. '
                    'Please change your risk .ini file.\nNB: do NOT '
                    'change the maximum_distance in the hazard .ini file!')
            job.hazard_calculation = haz_job
            hc = haz_job.get_oqparam()
            # copy the non-conflicting hazard parameters in the
            # risk parameters
            for name, value in hc:
                if name not in params:
                    params[name] = value
            params['hazard_investigation_time'] = hc.investigation_time
            params['hazard_imtls'] = dict(hc.imtls)
            cfd = hc.continuous_fragility_discretization
            if cfd and cfd != oqparam.continuous_fragility_discretization:
                raise RuntimeError(
                    'The hazard parameter continuous_fragility_'
                    'discretization was %d but the risk one is %d' % (
                        hc.continuous_fragility_discretization,
                        oqparam.continuous_fragility_discretization))
            job.save_params(params)
        job.save()
    return job
def __init__(self, job_ini, event_info, no_distribute=False):
    """
    Initialize a calculation (reinvented from openquake.engine.engine)

    :param job_ini:
        Path to configuration file/archive or
        dictionary of parameters with at least a key "calculation_mode"
    :param event_info:
        event information dictionary, stored as-is on the instance
    :param no_distribute:
        if True, disable task distribution via OQ_DISTRIBUTE='no'
    """
    # NOTE(review): user_name is computed but never used in this method
    user_name = getpass.getuser()

    if no_distribute:
        os.environ['OQ_DISTRIBUTE'] = 'no'

    # check if the datadir exists
    datadir = datastore.get_datadir()
    if not os.path.exists(datadir):
        os.makedirs(datadir)

    #dbserver.ensure_on()
    if dbserver.get_status() == 'not-running':
        if config.dbserver.multi_user:
            sys.exit('Please start the DbServer: '
                     'see the documentation for details')
        # otherwise start the DbServer automatically; NB: I tried to use
        # multiprocessing.Process(target=run_server).start() and apparently
        # it works, but then run-demos.sh hangs after the end of the first
        # calculation, but only if the DbServer is started by oq engine (!?)
        # Here is a trick to activate OpenQuake's dbserver
        # We first cd to the openquake directory and invoke subprocess to
        # open/hold on dbserver
        # Then, we cd back to the original working directory
        owd = os.getcwd()
        os.chdir(os.path.dirname(os.path.realpath(__file__)))
        subprocess.Popen([
            sys.executable, '-m', 'openquake.commands', 'dbserver', 'start'
        ])
        os.chdir(owd)

        # wait for the dbserver to start, polling once per second
        waiting_seconds = 30
        while dbserver.get_status() == 'not-running':
            if waiting_seconds == 0:
                sys.exit('The DbServer cannot be started after 30 seconds. '
                         'Please check the configuration')
            time.sleep(1)
            waiting_seconds -= 1

    # check if we are talking to the right server
    err = dbserver.check_foreign()
    if err:
        sys.exit(err)

    # Copy the event_info
    self.event_info = event_info

    # Create a job
    #self.job = logs.init("job", job_ini, logging.INFO, None, None, None)
    dic = readinput.get_params(job_ini)
    #dic['hazard_calculation_id'] = self.job.calc_id

    # Create the job log
    self.log = logs.init('job', dic, logging.INFO, None, None, None)

    # Get openquake parameters
    self.oqparam = self.log.get_oqparam()

    # Create the calculator
    self.calculator = base.calculators(self.oqparam, self.log.calc_id)
    self.calculator.from_engine = True

    print('FetchOpenQuake: OpenQuake Hazard Calculator initiated.')
def job_from_file(cfg_file_path, username, log_level='info', exports='',
                  hazard_output_id=None, hazard_calculation_id=None,
                  **extras):
    """
    Create a full job profile from a job config file.

    :param str cfg_file_path:
        Path to the job.ini.
    :param str username:
        The user who will own this job profile and all results.
    :param str log_level:
        Desired log level.
    :param exports:
        Comma-separated string of desired export types.
    :param int hazard_output_id:
        ID of a hazard output to use as input to this calculation. Specify
        this xor ``hazard_calculation_id``.
    :param int hazard_calculation_id:
        ID of a complete hazard job to use as input to this calculation.
        Specify this xor ``hazard_output_id``.
    :params extras:
        Extra parameters (used only in the tests to override the params)
    :returns:
        :class:`openquake.engine.db.models.OqJob` object
    :raises:
        `RuntimeError` if the input job configuration is not valid
    """
    assert os.path.exists(cfg_file_path), cfg_file_path
    from openquake.engine.calculators import calculators

    # determine the previous hazard job, if any
    if hazard_calculation_id:
        haz_job = models.OqJob.objects.get(pk=hazard_calculation_id)
    elif hazard_output_id:
        # extract the hazard job from the hazard_output_id
        haz_job = models.Output.objects.get(pk=hazard_output_id).oq_job
    else:
        haz_job = None  # no previous hazard job

    # create the current job
    job = create_job(user_name=username, log_level=log_level)
    models.JobStats.objects.create(oq_job=job)
    with logs.handle(job, log_level):
        # read calculation params and create the calculation profile
        params = readinput.get_params([cfg_file_path])
        # TODO: improve the logic before; it is very hackish we should
        # change the call in server.views.submit_job to pass the temporary dir
        if not exports:  # when called from the engine server
            # ignore the user-provided export_dir: the engine server will
            # export on demand with its own mechanism on a temporary directory
            params['export_dir'] = tempfile.gettempdir()
        params.update(extras)
        if haz_job:  # for risk calculations
            calcmode = params['calculation_mode']
            check_hazard_risk_consistency(haz_job, calcmode)
            if haz_job.user_name != username:
                logs.LOG.warn('You are using a hazard calculation ran by %s',
                              haz_job.user_name)
            if hazard_output_id and params.get('quantile_loss_curves'):
                logs.LOG.warn(
                    'quantile_loss_curves is on, but you passed a single '
                    'hazard output: the statistics will not be computed')

        # build and validate an OqParam object
        oqparam = readinput.get_oqparam(params, calculators=calculators)
        oqparam.hazard_calculation_id = \
            haz_job.id if haz_job and not hazard_output_id else None
        oqparam.hazard_output_id = hazard_output_id

        # rebuild the params dictionary from the validated OqParam object
        params = vars(oqparam).copy()
        if haz_job:
            params['hazard_calculation_id'] = haz_job.id

        if hazard_output_id is None and hazard_calculation_id is None:
            # this is a hazard calculation, not a risk one
            job.save_params(params)
            del params['hazard_calculation_id']
            del params['hazard_output_id']
        else:  # this is a risk calculation
            if 'maximum_distance' in params:
                raise NameError(
                    'The name of the parameter `maximum_distance` for risk '
                    'calculators has changed.\nIt is now '
                    '`asset_hazard_distance`. '
                    'Please change your risk .ini file.\nNB: do NOT '
                    'change the maximum_distance in the hazard .ini file!')
            job.hazard_calculation = haz_job
            hc = haz_job.get_oqparam()
            # copy the non-conflicting hazard parameters in the
            # risk parameters
            for name, value in hc:
                if name not in params:
                    params[name] = value
            params['hazard_investigation_time'] = hc.investigation_time
            params['hazard_imtls'] = dict(hc.imtls)
            cfd = hc.continuous_fragility_discretization
            if cfd and cfd != oqparam.continuous_fragility_discretization:
                raise RuntimeError(
                    'The hazard parameter continuous_fragility_'
                    'discretization was %d but the risk one is %d' % (
                        hc.continuous_fragility_discretization,
                        oqparam.continuous_fragility_discretization))
            job.save_params(params)
        job.save()
    return job