Code example #1
File: engine.py Project: preinh/oq-engine
def job_from_file_lite(cfg_file, username, log_level='info', exports='',
                       **extras):
    """
    Create a full job profile from a job config file.

    :param str cfg_file:
        Path to the job.ini file.
    :param str username:
        The user who will own this job profile and all results.
    :param str log_level:
        Desired log level.
    :param exports:
        Comma-separated string of desired export types.
    :param extras:
        Extra parameters (used only in the tests to override the params)

    :returns:
        :class:`openquake.engine.db.models.OqJob` object
    :raises:
        `RuntimeError` if the input job configuration is not valid
    """
    from openquake.commonlib.calculators import base
    # create the current job
    job = create_job(user_name=username, log_level=log_level)
    models.JobStats.objects.create(oq_job=job)
    with logs.handle(job, log_level):
        # read calculation params and create the calculation profile
        params = readinput.get_params([cfg_file])
        params.update(extras)
        # build and validate an OqParam object
        oqparam = readinput.get_oqparam(params, calculators=base.calculators)
        job.save_params(vars(oqparam))
        job.save()
    return job
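
A minimal usage sketch for the helper above. The import path (openquake.engine.engine), the username and the file path are placeholders assumed for illustration; the call itself follows the documented signature.

from openquake.engine import engine

# create the job profile from a single job.ini (path and username are placeholders)
job = engine.job_from_file_lite('/path/to/job.ini', 'openquake',
                                log_level='info', exports='csv')
print(job.id)  # the OqJob row now exists and its parameters are saved
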
Code example #2
File: engine.py Project: julgp/oq-engine
def run_calc(job, log_level, log_file, exports):
    """
    Run a calculation.

    :param job:
        :class:`openquake.engine.db.models.OqJob` instance
    :param str log_level:
        The desired logging level. Valid choices are 'debug', 'info',
        'progress', 'warn', 'error', and 'critical'.
    :param str log_file:
        Complete path (including file name) to file where logs will be written.
        If `None`, logging will just be printed to standard output.
    :param exports:
        A comma-separated string of export types.
    """
    # let's import the calculator classes here, when they are needed
    # the reason is that the command `$ oq-engine --upgrade-db`
    # does not need them and would raise strange errors during installation
    # time if the PYTHONPATH is not set and commonlib is not visible
    from openquake.engine.calculators import calculators

    # first of all check the database version and exit if the db is outdated
    upgrader.check_versions(django_db.connections['admin'])

    calculator = calculators(job)
    with logs.handle(job, log_level, log_file), job_stats(job):  # run the job
        _do_run_calc(calculator, exports)
    return calculator
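
A hedged sketch of how a job is created and then run with this signature; it assumes job_from_file from the same module returns the OqJob instance expected here, and the paths and export types are placeholders.

from openquake.engine import engine

job = engine.job_from_file('/path/to/job.ini', 'openquake')
calc = engine.run_calc(job, log_level='info',
                       log_file='/tmp/calc.log', exports='csv')
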
Code example #3
    def wrapped(*args):
        """
        Initialize logs, make sure the job is still running, and run the task
        code surrounded by a try-except. If any error occurs, log it as a
        critical failure.
        """
        # the last argument is assumed to be a monitor
        monitor = args[-1]
        job = models.OqJob.objects.get(id=monitor.job_id)
        if job.is_running is False:
            # the job was killed, it is useless to run the task
            raise JobNotRunning(monitor.job_id)

        # it is important to save the task id soon, so that
        # the revoke functionality can work
        with monitor('storing task id', task=tsk, autoflush=True):
            pass

        with logs.handle(job):
            check_mem_usage()  # warn if too much memory is used
            # run the task
            try:
                total = 'total ' + task_func.__name__
                with monitor(total, task=tsk, autoflush=True):
                    return task_func(*args)
            finally:
                # save on the db
                CacheInserter.flushall()
                # the task finished, we can remove from the performance
                # table the associated row 'storing task id'
                models.Performance.objects.filter(
                    oq_job=job,
                    operation='storing task id',
                    task_id=tsk.request.id).delete()
Code example #4
File: tasks.py Project: acerisara/oq-engine
    def wrapped(*args):
        """
        Initialize logs, make sure the job is still running, and run the task
        code surrounded by a try-except. If any error occurs, log it as a
        critical failure.
        """
        # the last argument is assumed to be a monitor
        monitor = args[-1]
        job = models.OqJob.objects.get(id=monitor.job_id)
        if job.is_running is False:
            # the job was killed, it is useless to run the task
            raise JobNotRunning(monitor.job_id)

        # it is important to save the task id soon, so that
        # the revoke functionality can work
        with monitor("storing task id", task=tsk, autoflush=True):
            pass

        with logs.handle(job):
            check_mem_usage()  # warn if too much memory is used
            # run the task
            try:
                total = "total " + task_func.__name__
                with monitor(total, task=tsk):
                    with GroundShakingIntensityModel.forbid_instantiation():
                        return task_func(*args)
            finally:
                # save on the db
                CacheInserter.flushall()
                # the task finished, we can remove from the performance
                # table the associated row 'storing task id'
                models.Performance.objects.filter(
                    oq_job=job, operation="storing task id", task_id=tsk.request.id
                ).delete()
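
The closure above uses names bound by an enclosing decorator (task_func, tsk). Below is a minimal sketch of how such a decorator could look; the decorator name oqtask and the registration via celery's shared_task are assumptions, not shown in the snippet.

from celery import shared_task

def oqtask(task_func):
    """Turn task_func into a celery task that runs the lifecycle shown above."""
    def wrapped(*args):
        # ... body as in code example #4: check that the job is still running,
        # store the task id, run task_func under the monitor, flush the caches ...
        return task_func(*args)
    tsk = shared_task(wrapped)  # register the wrapper as a celery task
    tsk.task_func = task_func   # keep a reference to the original function
    return tsk
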
Code example #5
File: engine.py Project: mtahara/oq-engine
def run_calc(job_id,
             oqparam,
             log_level,
             log_file,
             exports,
             hazard_calculation_id=None):
    """
    Run a calculation.

    :param job_id:
        ID of the current job
    :param oqparam:
        :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param str log_level:
        The desired logging level. Valid choices are 'debug', 'info',
        'progress', 'warn', 'error', and 'critical'.
    :param str log_file:
        Complete path (including file name) to file where logs will be written.
        If `None`, logging will just be printed to standard output.
    :param exports:
        A comma-separated string of export types.
    """
    monitor = Monitor('total runtime', measuremem=True)
    with logs.handle(job_id, log_level, log_file):  # run the job
        if USE_CELERY and os.environ.get('OQ_DISTRIBUTE') == 'celery':
            set_concurrent_tasks_default()
        calc = base.calculators(oqparam, monitor, calc_id=job_id)
        tb = 'None\n'
        try:
            _do_run_calc(calc, exports, hazard_calculation_id)
            logs.dbcmd('finish', job_id, 'complete')
            expose_outputs(calc.datastore)
            records = views.performance_view(calc.datastore)
            logs.dbcmd('save_performance', job_id, records)
            calc.datastore.close()
            logs.LOG.info('Calculation %d finished correctly in %d seconds',
                          job_id, calc.monitor.duration)
        except:
            tb = traceback.format_exc()
            try:
                logs.LOG.critical(tb)
                logs.dbcmd('finish', job_id, 'failed')
            except:  # an OperationalError may always happen
                sys.stderr.write(tb)
            raise
        finally:
            # if there was an error in the calculation, this part may fail;
            # in such a situation, we simply log the cleanup error without
            # taking further action, so that the real error can propagate
            try:
                if USE_CELERY:
                    celery_cleanup(TERMINATE, parallel.TaskManager.task_ids)
            except:
                # log the finalization error only if there is no real error
                if tb == 'None\n':
                    logs.LOG.error('finalizing', exc_info=True)
    return calc
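
A hedged sketch of this newer calling convention, where the caller builds the OqParam itself. The job id below is a placeholder for whatever the engine's job-creation helper returns, and passing a bare path to readinput.get_oqparam is assumed to be supported in this version.

from openquake.commonlib import readinput

oqparam = readinput.get_oqparam('/path/to/job.ini')  # build and validate the parameters
job_id = 42  # placeholder: normally returned by the engine's job-creation helper
calc = run_calc(job_id, oqparam, 'info', None, exports='csv')
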
Code example #6
File: engine.py Project: rcgee/oq-engine
def run_calc(job_id, oqparam, log_level, log_file, exports,
             hazard_calculation_id=None):
    """
    Run a calculation.

    :param job_id:
        ID of the current job
    :param oqparam:
        :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param str log_level:
        The desired logging level. Valid choices are 'debug', 'info',
        'progress', 'warn', 'error', and 'critical'.
    :param str log_file:
        Complete path (including file name) to file where logs will be written.
        If `None`, logging will just be printed to standard output.
    :param exports:
        A comma-separated string of export types.
    """
    monitor = Monitor('total runtime', measuremem=True)
    with logs.handle(job_id, log_level, log_file):  # run the job
        if USE_CELERY and os.environ.get('OQ_DISTRIBUTE') == 'celery':
            set_concurrent_tasks_default()
        calc = base.calculators(oqparam, monitor, calc_id=job_id)
        tb = 'None\n'
        try:
            logs.dbcmd('set_status', job_id, 'executing')
            _do_run_calc(calc, exports, hazard_calculation_id)
            expose_outputs(calc.datastore)
            records = views.performance_view(calc.datastore)
            logs.dbcmd('save_performance', job_id, records)
            calc.datastore.close()
            logs.LOG.info('Calculation %d finished correctly in %d seconds',
                          job_id, calc.monitor.duration)
            logs.dbcmd('finish', job_id, 'complete')
        except:
            tb = traceback.format_exc()
            try:
                logs.LOG.critical(tb)
                logs.dbcmd('finish', job_id, 'failed')
            except:  # an OperationalError may always happen
                sys.stderr.write(tb)
            raise
        finally:
            # if there was an error in the calculation, this part may fail;
            # in such a situation, we simply log the cleanup error without
            # taking further action, so that the real error can propagate
            try:
                if USE_CELERY:
                    celery_cleanup(TERMINATE, parallel.TaskManager.task_ids)
            except:
                # log the finalization error only if there is no real error
                if tb == 'None\n':
                    logs.LOG.error('finalizing', exc_info=True)
    return calc
Code example #7
File: engine.py Project: preinh/oq-engine
def run_calc(job, log_level, log_file, exports, lite=False):
    """
    Run a calculation.

    :param job:
        :class:`openquake.engine.db.models.OqJob` instance
    :param str log_level:
        The desired logging level. Valid choices are 'debug', 'info',
        'progress', 'warn', 'error', and 'critical'.
    :param str log_file:
        Complete path (including file name) to file where logs will be written.
        If `None`, logging will just be printed to standard output.
    :param exports:
        A comma-separated string of export types.
    :param lite:
        Flag set when the oq-lite calculators are used
    """
    # let's import the calculator classes here, when they are needed;
    # the reason is that the command `$ oq-engine --upgrade-db`
    # does not need them and would raise strange errors during installation
    # time if the PYTHONPATH is not set and commonlib is not visible
    if lite:
        calc_dir = os.path.join(datastore.DATADIR, 'calc_%d' % job.id)
        if os.path.exists(calc_dir):
            os.rename(calc_dir, calc_dir + '.bak')
            print('Generated %s.bak' % calc_dir)
        from openquake.commonlib.calculators import base
        calculator = base.calculators(job.get_oqparam(), calc_id=job.id)
        calculator.job = job
        calculator.monitor = EnginePerformanceMonitor('', job.id)
    else:
        from openquake.engine.calculators import calculators
        calculator = calculators(job)

    # first of all check the database version and exit if the db is outdated
    upgrader.check_versions(django_db.connections['admin'])
    with logs.handle(job, log_level, log_file), job_stats(job):  # run the job
        try:
            _do_run_calc(calculator, exports)
        except:
            tb = traceback.format_exc()
            logs.LOG.critical(tb)
            raise
    return calculator
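
A short hedged sketch of the lite path: with lite=True the commonlib calculators are used and any pre-existing calc_<id> datastore directory is renamed to a .bak copy first. The job object is assumed to come from one of the job_from_file* helpers shown elsewhere on this page.

calculator = run_calc(job, log_level='info', log_file=None,
                      exports='csv', lite=True)
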
Code example #8
File: engine.py Project: ChristieHale/oq-engine
def job_from_files(cfg_files,
                   username,
                   log_level='info',
                   exports='',
                   **extras):
    """
    Create a full job profile from one or more job config files.

    :param cfg_files:
        Paths to the job.ini files.
    :param str username:
        The user who will own this job profile and all results.
    :param str log_level:
        Desired log level.
    :param exports:
        Comma-separated string of desired export types.
    :param extras:
        Extra parameters (used only in the tests to override the params)

    :returns:
        :class:`openquake.engine.db.models.OqJob` object
    :raises:
        `RuntimeError` if the input job configuration is not valid
    """
    from openquake.commonlib.calculators import base
    # create the current job
    job = create_job(user_name=username, log_level=log_level)
    models.JobStats.objects.create(oq_job=job)
    with logs.handle(job, log_level):
        # read calculation params and create the calculation profile
        params = readinput.get_params(cfg_files)
        params['hazard_output_id'] = None
        params['hazard_calculation_id'] = None
        params.update(extras)
        # build and validate an OqParam object
        oqparam = readinput.get_oqparam(params, calculators=base.calculators)
        oqparam.concurrent_tasks = int(config.get('celery',
                                                  'concurrent_tasks'))
        job.save_params(vars(oqparam))
        job.save()
    return job
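
A hedged usage sketch: job_from_files accepts a list of job.ini paths whose parameters are read together. The file names, import path and username are placeholders.

from openquake.engine import engine

job = engine.job_from_files(['job_hazard.ini', 'job_risk.ini'], 'openquake',
                            log_level='info', exports='csv')
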
Code example #9
File: engine.py Project: ChristieHale/oq-engine
def run_calc(job, log_level, log_file, exports, lite=False):
    """
    Run a calculation.

    :param job:
        :class:`openquake.engine.db.models.OqJob` instance
    :param str log_level:
        The desired logging level. Valid choices are 'debug', 'info',
        'progress', 'warn', 'error', and 'critical'.
    :param str log_file:
        Complete path (including file name) to file where logs will be written.
        If `None`, logging will just be printed to standard output.
    :param exports:
        A comma-separated string of export types.
    :param lite:
        Flag set when the oq-lite calculators are used
    """
    # let's import the calculator classes here, when they are needed
    # the reason is that the command `$ oq-engine --upgrade-db`
    # does not need them and would raise strange errors during installation
    # time if the PYTHONPATH is not set and commonlib is not visible
    if lite:
        from openquake.commonlib.calculators import base
        calculator = base.calculators(job.get_oqparam())
        calculator.job = job
        calculator.monitor = EnginePerformanceMonitor('', job.id)
    else:
        from openquake.engine.calculators import calculators
        calculator = calculators(job)

    # first of all check the database version and exit if the db is outdated
    upgrader.check_versions(django_db.connections['admin'])
    with logs.handle(job, log_level, log_file), job_stats(job):  # run the job
        try:
            _do_run_calc(calculator, exports)
        except:
            tb = traceback.format_exc()
            logs.LOG.critical(tb)
            raise
    return calculator
Code example #10
File: engine.py Project: julgp/oq-engine
def job_from_file(cfg_file_path, username, log_level='info', exports='',
                  hazard_output_id=None, hazard_calculation_id=None, **extras):
    """
    Create a full job profile from a job config file.

    :param str cfg_file_path:
        Path to the job.ini.
    :param str username:
        The user who will own this job profile and all results.
    :param str log_level:
        Desired log level.
    :param exports:
        Comma-separated string of desired export types.
    :param int hazard_output_id:
        ID of a hazard output to use as input to this calculation. Specify
        this xor ``hazard_calculation_id``.
    :param int hazard_calculation_id:
        ID of a complete hazard job to use as input to this
        calculation. Specify this xor ``hazard_output_id``.
    :param extras:
        Extra parameters (used only in the tests to override the params)
    :returns:
        :class:`openquake.engine.db.models.OqJob` object
    :raises:
        `RuntimeError` if the input job configuration is not valid
    """
    assert os.path.exists(cfg_file_path), cfg_file_path

    from openquake.engine.calculators import calculators

    # determine the previous hazard job, if any
    if hazard_calculation_id:
        haz_job = models.OqJob.objects.get(pk=hazard_calculation_id)
    elif hazard_output_id:  # extract the hazard job from the hazard_output_id
        haz_job = models.Output.objects.get(pk=hazard_output_id).oq_job
    else:
        haz_job = None  # no previous hazard job

    # create the current job
    job = prepare_job(user_name=username, log_level=log_level)
    models.JobStats.objects.create(oq_job=job)

    # read calculation params and create the calculation profile
    with logs.handle(job, log_level):
        oqparam = readinput.get_oqparam(cfg_file_path, calculators)
        oqparam.hazard_calculation_id = \
            haz_job.id if haz_job and not hazard_output_id else None
        oqparam.hazard_output_id = hazard_output_id
        vars(oqparam).update(extras)

    if haz_job:  # for risk calculations
        check_hazard_risk_consistency(haz_job, oqparam.calculation_mode)
        if haz_job.user_name != username:
            logs.LOG.warn(
                'You are using a hazard calculation ran by %s',
                haz_job.user_name)
        if oqparam.hazard_output_id and getattr(
                oqparam, 'quantile_loss_curves', False):
            logs.LOG.warn(
                'quantile_loss_curves is on, but you passed a single hazard '
                'output: the statistics will not be computed')

    params = vars(oqparam).copy()
    if 'quantile_loss_curves' not in params:
        params['quantile_loss_curves'] = []
    if 'poes_disagg' not in params:
        params['poes_disagg'] = []
    if 'sites_disagg' not in params:
        params['sites_disagg'] = []
    if 'specific_assets' not in params:
        params['specific_assets'] = []
    if 'conditional_loss_poes' not in params:
        params['conditional_loss_poes'] = []
    if haz_job:
        params['hazard_calculation_id'] = haz_job.id
    job.save_params(params)

    if hazard_output_id is None and hazard_calculation_id is None:
        # this is a hazard calculation, not a risk one
        del params['hazard_calculation_id']
        del params['hazard_output_id']
    else:  # this is a risk calculation
        job.hazard_calculation = haz_job

    job.save()
    return job
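
A hedged sketch of the risk case: passing hazard_calculation_id links the new job to a finished hazard job so its parameters and outputs can be reused. The id value, import path and file names are placeholders.

from openquake.engine import engine

risk_job = engine.job_from_file('/path/to/job_risk.ini', 'openquake',
                                hazard_calculation_id=1234)
calc = engine.run_calc(risk_job, 'info', None, exports='csv')
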
Code example #11
File: engine.py Project: preinh/oq-engine
def job_from_file(cfg_file_path, username, log_level='info', exports='',
                  hazard_output_id=None, hazard_calculation_id=None, **extras):
    """
    Create a full job profile from a job config file.

    :param str cfg_file_path:
        Path to the job.ini.
    :param str username:
        The user who will own this job profile and all results.
    :param str log_level:
        Desired log level.
    :param exports:
        Comma-separated string of desired export types.
    :param int hazard_output_id:
        ID of a hazard output to use as input to this calculation. Specify
        this xor ``hazard_calculation_id``.
    :param int hazard_calculation_id:
        ID of a complete hazard job to use as input to this
        calculation. Specify this xor ``hazard_output_id``.
    :param extras:
        Extra parameters (used only in the tests to override the params)

    :returns:
        :class:`openquake.engine.db.models.OqJob` object
    :raises:
        `RuntimeError` if the input job configuration is not valid
    """
    assert os.path.exists(cfg_file_path), cfg_file_path

    from openquake.engine.calculators import calculators

    # determine the previous hazard job, if any
    if hazard_calculation_id:
        haz_job = models.OqJob.objects.get(pk=hazard_calculation_id)
    elif hazard_output_id:  # extract the hazard job from the hazard_output_id
        haz_job = models.Output.objects.get(pk=hazard_output_id).oq_job
    else:
        haz_job = None  # no previous hazard job

    # create the current job
    job = create_job(user_name=username, log_level=log_level)
    models.JobStats.objects.create(oq_job=job)
    with logs.handle(job, log_level):
        # read calculation params and create the calculation profile
        params = readinput.get_params([cfg_file_path])
        # TODO: improve the logic before; it is very hackish we should
        # change the call in server.views.submit_job to pass the temporary dir
        if not exports:  # when called from the engine server
            # ignore the user-provided export_dir: the engine server will
            # export on demand with its own mechanism on a temporary directory
            params['export_dir'] = tempfile.gettempdir()
        params.update(extras)
        if haz_job:  # for risk calculations
            calcmode = params['calculation_mode']
            check_hazard_risk_consistency(haz_job, calcmode)
            if haz_job.user_name != username:
                logs.LOG.warn(
                    'You are using a hazard calculation ran by %s',
                    haz_job.user_name)
            if hazard_output_id and params.get('quantile_loss_curves'):
                logs.LOG.warn(
                    'quantile_loss_curves is on, but you passed a single '
                    'hazard output: the statistics will not be computed')

        # build and validate an OqParam object
        oqparam = readinput.get_oqparam(params, calculators=calculators)
        oqparam.hazard_calculation_id = \
            haz_job.id if haz_job and not hazard_output_id else None
        oqparam.hazard_output_id = hazard_output_id

    params = vars(oqparam).copy()
    if haz_job:
        params['hazard_calculation_id'] = haz_job.id

    if hazard_output_id is None and hazard_calculation_id is None:
        # this is a hazard calculation, not a risk one
        job.save_params(params)
        del params['hazard_calculation_id']
        del params['hazard_output_id']
    else:  # this is a risk calculation
        if 'maximum_distance' in params:
            raise NameError(
                'The name of the parameter `maximum_distance` for risk '
                'calculators has changed.\nIt is now `asset_hazard_distance`. '
                'Please change your risk .ini file.\nNB: do NOT '
                'change the maximum_distance in the hazard .ini file!')

        job.hazard_calculation = haz_job
        hc = haz_job.get_oqparam()
        # copy the non-conflicting hazard parameters in the risk parameters
        for name, value in hc:
            if name not in params:
                params[name] = value
        params['hazard_investigation_time'] = hc.investigation_time
        params['hazard_imtls'] = dict(hc.imtls)
        cfd = hc.continuous_fragility_discretization
        if cfd and cfd != oqparam.continuous_fragility_discretization:
            raise RuntimeError(
                'The hazard parameter continuous_fragility_discretization '
                'was %d but the risk one is %d' % (
                    hc.continuous_fragility_discretization,
                    oqparam.continuous_fragility_discretization))
        job.save_params(params)

    job.save()
    return job
Code example #12
File: engine.py Project: ChristieHale/oq-engine
def job_from_file(cfg_file_path,
                  username,
                  log_level='info',
                  exports='',
                  hazard_output_id=None,
                  hazard_calculation_id=None,
                  **extras):
    """
    Create a full job profile from a job config file.

    :param str cfg_file_path:
        Path to the job.ini.
    :param str username:
        The user who will own this job profile and all results.
    :param str log_level:
        Desired log level.
    :param exports:
        Comma-separated string of desired export types.
    :param int hazard_output_id:
        ID of a hazard output to use as input to this calculation. Specify
        this xor ``hazard_calculation_id``.
    :param int hazard_calculation_id:
        ID of a complete hazard job to use as input to this
        calculation. Specify this xor ``hazard_output_id``.
    :param extras:
        Extra parameters (used only in the tests to override the params)

    :returns:
        :class:`openquake.engine.db.models.OqJob` object
    :raises:
        `RuntimeError` if the input job configuration is not valid
    """
    assert os.path.exists(cfg_file_path), cfg_file_path

    from openquake.engine.calculators import calculators

    # determine the previous hazard job, if any
    if hazard_calculation_id:
        haz_job = models.OqJob.objects.get(pk=hazard_calculation_id)
    elif hazard_output_id:  # extract the hazard job from the hazard_output_id
        haz_job = models.Output.objects.get(pk=hazard_output_id).oq_job
    else:
        haz_job = None  # no previous hazard job

    # create the current job
    job = create_job(user_name=username, log_level=log_level)
    models.JobStats.objects.create(oq_job=job)
    with logs.handle(job, log_level):
        # read calculation params and create the calculation profile
        params = readinput.get_params([cfg_file_path])
        # TODO: improve the logic before; it is very hackish we should
        # change the call in server.views.submit_job to pass the temporary dir
        if not exports:  # when called from the engine server
            # ignore the user-provided export_dir: the engine server will
            # export on demand with its own mechanism on a temporary directory
            params['export_dir'] = tempfile.gettempdir()
        params.update(extras)
        if haz_job:  # for risk calculations
            calcmode = params['calculation_mode']
            check_hazard_risk_consistency(haz_job, calcmode)
            if haz_job.user_name != username:
                logs.LOG.warn('You are using a hazard calculation ran by %s',
                              haz_job.user_name)
            if hazard_output_id and params.get('quantile_loss_curves'):
                logs.LOG.warn(
                    'quantile_loss_curves is on, but you passed a single '
                    'hazard output: the statistics will not be computed')

        # build and validate an OqParam object
        oqparam = readinput.get_oqparam(params, calculators=calculators)
        oqparam.hazard_calculation_id = \
            haz_job.id if haz_job and not hazard_output_id else None
        oqparam.hazard_output_id = hazard_output_id

    params = vars(oqparam).copy()
    if haz_job:
        params['hazard_calculation_id'] = haz_job.id

    if hazard_output_id is None and hazard_calculation_id is None:
        # this is a hazard calculation, not a risk one
        job.save_params(params)
        del params['hazard_calculation_id']
        del params['hazard_output_id']
    else:  # this is a risk calculation
        if 'maximum_distance' in params:
            raise NameError(
                'The name of the parameter `maximum_distance` for risk '
                'calculators has changed.\nIt is now `asset_hazard_distance`. '
                'Please change your risk .ini file.\nNB: do NOT '
                'change the maximum_distance in the hazard .ini file!')

        job.hazard_calculation = haz_job
        hc = haz_job.get_oqparam()
        # copy the non-conflicting hazard parameters in the risk parameters
        for name, value in hc:
            if name not in params:
                params[name] = value
        params['hazard_investigation_time'] = hc.investigation_time
        params['hazard_imtls'] = dict(hc.imtls)
        cfd = hc.continuous_fragility_discretization
        if cfd and cfd != oqparam.continuous_fragility_discretization:
            raise RuntimeError(
                'The hazard parameter continuous_fragility_discretization '
                'was %d but the risk one is %d' %
                (hc.continuous_fragility_discretization,
                 oqparam.continuous_fragility_discretization))
        job.save_params(params)

    job.save()
    return job