Example #1
def check_input(job_ini_or_zip_or_nrmls):
    for job_ini_or_zip_or_nrml in job_ini_or_zip_or_nrmls:
        if job_ini_or_zip_or_nrml.endswith('.xml'):
            try:
                print(nrml.to_python(job_ini_or_zip_or_nrml))
            except Exception as exc:
                sys.exit(exc)
        else:
            base.calculators(
                readinput.get_oqparam(job_ini_or_zip_or_nrml)).read_inputs()
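
A minimal, hedged driver for the check_input() above, as it could be appended below the function. The import locations are inferred from the other snippets on this page (Example #5 imports base from openquake.calculators) and may differ between engine versions.

import sys
from openquake.hazardlib import nrml         # assumed module path
from openquake.commonlib import readinput    # assumed module path
from openquake.calculators import base       # as in Example #5

if __name__ == '__main__':
    # e.g. python check.py job.ini exposure.xml job.zip
    check_input(sys.argv[1:])
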
Example #2
def run2(job_haz, job_risk, concurrent_tasks, pdb, exports, params):
    """
    Run both hazard and risk, one after the other
    """
    hcalc = base.calculators(readinput.get_oqparam(job_haz))
    hcalc.run(concurrent_tasks=concurrent_tasks, pdb=pdb,
              exports=exports, **params)
    hc_id = hcalc.datastore.calc_id
    oq = readinput.get_oqparam(job_risk, hc_id=hc_id)
    rcalc = base.calculators(oq)
    rcalc.run(pdb=pdb, exports=exports, hazard_calculation_id=hc_id, **params)
    return rcalc
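
A hedged usage sketch for the run2() above: the file names are illustrative, params would hold extra OqParam overrides, and the hazard calculation id flows into the risk run through hcalc.datastore.calc_id.

rcalc = run2('job_hazard.ini', 'job_risk.ini',
             concurrent_tasks=4, pdb=None, exports='csv', params={})
print('risk outputs stored in calculation', rcalc.datastore.calc_id)
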
Example #3
def run2(job_haz, job_risk, concurrent_tasks, pdb, exports, monitor):
    """
    Run both hazard and risk, one after the other
    """
    hcalc = base.calculators(readinput.get_oqparam(job_haz), monitor)
    with monitor:
        hcalc.run(concurrent_tasks=concurrent_tasks, pdb=pdb, exports=exports)
        hc_id = hcalc.datastore.calc_id
        oq = readinput.get_oqparam(job_risk, hc_id=hc_id)
        rcalc = base.calculators(oq, monitor)
        rcalc.run(concurrent_tasks=concurrent_tasks, pdb=pdb, exports=exports,
                  hazard_calculation_id=hc_id)
    return rcalc
Example #4
def run2(job_haz, job_risk, calc_id, concurrent_tasks, pdb, loglevel,
         exports, params):
    """
    Run both hazard and risk, one after the other
    """
    hcalc = base.calculators(readinput.get_oqparam(job_haz), calc_id)
    hcalc.run(concurrent_tasks=concurrent_tasks, pdb=pdb,
              exports=exports, **params)
    hc_id = hcalc.datastore.calc_id
    rcalc_id = logs.init(level=getattr(logging, loglevel.upper()))
    oq = readinput.get_oqparam(job_risk, hc_id=hc_id)
    rcalc = base.calculators(oq, rcalc_id)
    rcalc.run(pdb=pdb, exports=exports, **params)
    return rcalc
Example #5
def build_report(job_ini, output_dir=None):
    """
    Write a `report.rst` file with information about the calculation
    without running it

    :param job_ini:
        full pathname of the job.ini file
    :param output_dir:
        the directory where the report is written (default the input directory)
    """
    oq = readinput.get_oqparam(job_ini)
    output_dir = output_dir or os.path.dirname(job_ini)
    from openquake.calculators import base  # ugly
    calc = base.calculators(oq)
    calc.save_params()  # needed to save oqparam

    # some care is taken so that the real calculation is not run:
    # the goal is to extract information about the source management only
    p = mock.patch.object
    with p(PSHACalculator, 'core_task', count_eff_ruptures):
        if calc.pre_calculator == 'event_based_risk':
            # compute the ruptures only, not the risk
            calc.pre_calculator = 'event_based_rupture'
        calc.pre_execute()
    if hasattr(calc, '_composite_source_model'):
        calc.datastore['csm_info'] = calc.csm.info
    rw = ReportWriter(calc.datastore)
    rw.make_report()
    report = (os.path.join(output_dir, 'report.rst')
              if output_dir else calc.datastore.export_path('report.rst'))
    try:
        rw.save(report)
    except IOError as exc:  # permission error
        sys.stderr.write(str(exc) + '\n')
    return report
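
An illustrative call for the build_report() above; the paths are placeholders. The function returns the full pathname of the written report.rst file, which is plain reStructuredText.

report = build_report('/path/to/job.ini', output_dir='/tmp')
with open(report) as f:
    print(f.read())
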
Example #6
def check_input(job_ini_or_zip_or_nrmls):
    for job_ini_or_zip_or_nrml in job_ini_or_zip_or_nrmls:
        if job_ini_or_zip_or_nrml.endswith('.xml'):
            try:
                node = nrml.to_python(job_ini_or_zip_or_nrml)
                if node.tag.endswith('exposureModel'):
                    err = Exposure.check(job_ini_or_zip_or_nrml)
                    if err:
                        logging.warning(err)
                else:
                    logging.info('Checked %s', job_ini_or_zip_or_nrml)
            except Exception as exc:
                sys.exit(exc)
        else:
            oq = readinput.get_oqparam(job_ini_or_zip_or_nrml)
            base.calculators(oq, logs.init()).read_inputs()
Example #7
def _run(job_ini, concurrent_tasks, pdb, reuse_input, loglevel, exports,
         params):
    global calc_path
    if 'hazard_calculation_id' in params:
        hc_id = int(params['hazard_calculation_id'])
        if hc_id < 0:  # interpret negative calculation ids
            calc_ids = datastore.get_calc_ids()
            try:
                params['hazard_calculation_id'] = calc_ids[hc_id]
            except IndexError:
                raise SystemExit('There are %d old calculations, cannot '
                                 'retrieve the %s' % (len(calc_ids), hc_id))
        else:
            params['hazard_calculation_id'] = hc_id
    dic = readinput.get_params(job_ini, params)
    # set the logs first of all
    log = logs.init("job", dic, getattr(logging, loglevel.upper()))

    # disable gzip_input
    base.BaseCalculator.gzip_inputs = lambda self: None
    with log, performance.Monitor('total runtime', measuremem=True) as monitor:
        calc = base.calculators(log.get_oqparam(), log.calc_id)
        if reuse_input:  # enable caching
            calc.oqparam.cachedir = datastore.get_datadir()
        calc.run(concurrent_tasks=concurrent_tasks, pdb=pdb, exports=exports)

    logging.info('Total time spent: %s s', monitor.duration)
    logging.info('Memory allocated: %s', general.humansize(monitor.mem))
    print('See the output with silx view %s' % calc.datastore.filename)
    calc_path, _ = os.path.splitext(calc.datastore.filename)  # used below
    return calc
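
The negative-id convention handled in _run() can be isolated in a tiny sketch: a negative hazard_calculation_id indexes the list of past calculation ids from the end, exactly like Python negative indexing (the ids below are stand-ins for datastore.get_calc_ids()).

calc_ids = [42, 57, 63]        # oldest ... newest past calculations
assert calc_ids[-1] == 63      # hazard_calculation_id=-1: the most recent one
assert calc_ids[-2] == 57      # hazard_calculation_id=-2: the one before that
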
Example #8
def build_report(job_ini, output_dir=None):
    """
    Write a `report.rst` file with information about the calculation
    without running it

    :param job_ini:
        full pathname of the job.ini file
    :param output_dir:
        the directory where the report is written (default the input directory)
    """
    oq = readinput.get_oqparam(job_ini)
    oq.ground_motion_fields = False
    output_dir = output_dir or os.path.dirname(job_ini)
    from openquake.calculators import base  # ugly
    calc = base.calculators(oq)
    calc.save_params()  # needed to save oqparam

    # some care is taken so that the real calculation is not run:
    # the goal is to extract information about the source management only
    calc.pre_execute()
    rw = ReportWriter(calc.datastore)
    rw.make_report()
    report = (os.path.join(output_dir, 'report.rst') if output_dir
              else calc.datastore.export_path('report.rst'))
    try:
        rw.save(report)
    except IOError as exc:  # permission error
        sys.stderr.write(str(exc) + '\n')
    return report
Example #9
def run_calc(job, log_level, log_file, exports, lite=False):
    """
    Run a calculation.

    :param job:
        :class:`openquake.engine.db.model.OqJob` instance
    :param str log_level:
        The desired logging level. Valid choices are 'debug', 'info',
        'progress', 'warn', 'error', and 'critical'.
    :param str log_file:
        Complete path (including file name) to file where logs will be written.
        If `None`, logging will just be printed to standard output.
    :param exports:
        A comma-separated string of export types.
    :param lite:
        Flag set when the oq-lite calculators are used
    """
    # let's import the calculator classes here, when they are needed;
    # the reason is that the command `$ oq-engine --upgrade-db`
    # does not need them and would raise strange errors during installation
    # time if the PYTHONPATH is not set and commonlib is not visible
    if lite:
        from openquake.calculators import base
        calculator = base.calculators(job.get_oqparam(), calc_id=job.id)
        calculator.job = job
        calculator.monitor = EnginePerformanceMonitor('', job.id)
    else:
        from openquake.engine.calculators import calculators
        calculator = calculators(job)

    # first of all check the database version and exit if the db is outdated
    upgrader.check_versions(django_db.connections['admin'])
    with logs.handle(job, log_level, log_file), job_stats(job):  # run the job
        _do_run_calc(calculator, exports)
    return calculator
Example #10
def run_calc(job_id,
             oqparam,
             log_level,
             log_file,
             exports,
             hazard_calculation_id=None,
             **kw):
    """
    Run a calculation.

    :param job_id:
        ID of the current job
    :param oqparam:
        :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param str log_level:
        The desired logging level. Valid choices are 'debug', 'info',
        'progress', 'warn', 'error', and 'critical'.
    :param str log_file:
        Complete path (including file name) to file where logs will be written.
        If `None`, logging will just be printed to standard output.
    :param exports:
        A comma-separated string of export types.
    """
    monitor = Monitor('total runtime', measuremem=True)
    with logs.handle(job_id, log_level, log_file):  # run the job
        if USE_CELERY and os.environ.get('OQ_DISTRIBUTE') == 'celery':
            set_concurrent_tasks_default()
        calc = base.calculators(oqparam, monitor, calc_id=job_id)
        calc.from_engine = True
        tb = 'None\n'
        try:
            logs.dbcmd('set_status', job_id, 'executing')
            _do_run_calc(calc, exports, hazard_calculation_id, **kw)
            expose_outputs(calc.datastore)
            records = views.performance_view(calc.datastore)
            logs.dbcmd('save_performance', job_id, records)
            calc.datastore.close()
            logs.LOG.info('Calculation %d finished correctly in %d seconds',
                          job_id, calc._monitor.duration)
            logs.dbcmd('finish', job_id, 'complete')
        except:
            tb = traceback.format_exc()
            try:
                logs.LOG.critical(tb)
                logs.dbcmd('finish', job_id, 'failed')
            except:  # an OperationalError may always happen
                sys.stderr.write(tb)
            raise
        finally:
            # if there was an error in the calculation, this part may fail;
            # in such a situation, we simply log the cleanup error without
            # taking further action, so that the real error can propagate
            try:
                if USE_CELERY:
                    celery_cleanup(TERMINATE, parallel.Starmap.task_ids)
            except:
                # log the finalization error only if there is no real error
                if tb == 'None\n':
                    logs.LOG.error('finalizing', exc_info=True)
    return calc
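
The tb == 'None\n' sentinel above is how the finally clause tells "the calculation succeeded" apart from "a real error is already propagating". A stripped-down, self-contained sketch of the pattern (guarded_run is a hypothetical stand-in, not engine code):

import traceback

def guarded_run(operation):
    # mirror of the pattern above: tb doubles as a "did we fail?" flag
    tb = 'None\n'
    try:
        operation()
    except Exception:
        tb = traceback.format_exc()
        raise
    finally:
        if tb == 'None\n':
            # no real error: cleanup problems could be logged loudly here
            pass

guarded_run(lambda: None)  # succeeds; the finally branch sees tb == 'None\n'
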
Example #11
def build_report(job_ini, output_dir=None):
    """
    Write a `report.rst` file with information about the calculation.

    :param job_ini:
        full pathname of the job.ini file
    :param output_dir:
        the directory where the report is written (default the input directory)
    """
    oq = readinput.get_oqparam(job_ini)
    output_dir = output_dir or os.path.dirname(job_ini)
    calc = base.calculators(oq)
    calc.pre_execute()
    calc.save_params()
    ds = datastore.DataStore(calc.datastore.calc_id)
    rw = ReportWriter(ds)
    report = os.path.join(output_dir, 'report.rst')
    for name in ('params', 'inputs'):
        rw.add(name)
    if 'scenario' not in oq.calculation_mode:
        rw.add('csm_info')
    rw.add('rlzs_assoc', calc.rlzs_assoc)
    if 'num_ruptures' in ds:
        rw.add('rupture_collections')
        rw.add('col_rlz_assocs')
    elif 'scenario' not in oq.calculation_mode:
        rw.add('ruptures_by_trt')
    if oq.calculation_mode in ('classical', 'event_based', 'event_based_risk'):
        rw.add('data_transfer')
    if 'exposure' in oq.inputs:
        rw.add('exposure_info')
    rw.save(report)
    return report
Example #12
def build_report(job_ini, output_dir=None):
    """
    Write a `report.rst` file with information about the calculation
    without running it

    :param job_ini:
        full pathname of the job.ini file
    :param output_dir:
        the directory where the report is written (default the input directory)
    """
    oq = readinput.get_oqparam(job_ini)
    output_dir = output_dir or os.path.dirname(job_ini)
    from openquake.calculators import base  # ugly
    calc = base.calculators(oq)
    calc.save_params()  # needed to save oqparam

    # some care is taken so that the real calculation is not run:
    # the goal is to extract information about the source management only
    with mock.patch.object(PSHACalculator, 'core_task', count_eff_ruptures):
        calc.pre_execute()
    if hasattr(calc, '_composite_source_model'):
        calc.datastore['csm_info'] = calc.csm.info
    rw = ReportWriter(calc.datastore)
    rw.make_report()
    report = (os.path.join(output_dir, 'report.rst') if output_dir
              else calc.datastore.export_path('report.rst'))
    try:
        rw.save(report)
    except IOError as exc:  # permission error
        sys.stderr.write(str(exc) + '\n')
    return report
Example #13
def run(job_ini, concurrent_tasks=None, pdb=None,
        loglevel='info', hc=None, exports=''):
    """
    Run a calculation. Optionally, set the number of concurrent_tasks
    (0 to disable the parallelization).
    """
    logging.basicConfig(level=getattr(logging, loglevel.upper()))
    job_inis = job_ini.split(',')
    assert len(job_inis) in (1, 2), job_inis
    monitor = performance.PerformanceMonitor('total', measuremem=True)

    if len(job_inis) == 1:  # run hazard or risk
        oqparam = readinput.get_oqparam(job_inis[0], hc_id=hc)
        if hc and hc < 0:  # interpret negative calculation ids
            calc_ids = datastore.get_calc_ids()
            try:
                hc = calc_ids[hc]
            except IndexError:
                raise SystemExit('There are %d old calculations, cannot '
                                 'retrieve the %s' % (len(calc_ids), hc))
        calc = base.calculators(oqparam, monitor)
        with monitor:
            calc.run(concurrent_tasks=concurrent_tasks, pdb=pdb,
                     exports=exports, hazard_calculation_id=hc)
    else:  # run hazard + risk
        calc = run2(
            job_inis[0], job_inis[1], concurrent_tasks, pdb, exports, monitor)

    logging.info('Total time spent: %s s', monitor.duration)
    logging.info('Memory allocated: %s', general.humansize(monitor.mem))
    monitor.flush()
    print('See the output with hdfview %s' % calc.datastore.hdf5path)
    return calc
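
Hedged usage of the run() above: a single job.ini runs hazard or risk on its own, while a comma-joined pair runs hazard then risk, as the split(',') implies. File names are illustrative.

calc = run('job_hazard.ini,job_risk.ini', concurrent_tasks=8, exports='csv')
print(calc.datastore.hdf5path)
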
Example #14
def build_report(job_ini, output_dir=None):
    """
    Write a `report.rst` file with information about the calculation
    without running it

    :param job_ini:
        full pathname of the job.ini file
    :param output_dir:
        the directory where the report is written (default the input directory)
    """
    oq = readinput.get_oqparam(job_ini)
    output_dir = output_dir or os.path.dirname(job_ini)
    calc = base.calculators(oq)
    # some care is taken so that the real calculation is not run:
    # the goal is to extract information about the source management only
    with mock.patch.object(
            calc.__class__, 'core_task', source.count_eff_ruptures):
        calc.pre_execute()
    with mock.patch.object(logging.root, 'info'):  # reduce logging
        calc.execute()
    calc.save_params()
    rw = ReportWriter(calc.datastore)
    rw.make_report()
    report = (os.path.join(output_dir, 'report.rst') if output_dir
              else calc.datastore.export_path('report.rst'))
    try:
        rw.save(report)
    except IOError as exc:  # permission error
        sys.stderr.write(str(exc) + '\n')
    return report
Example #15
def _run(job_ini, concurrent_tasks, pdb, loglevel, hc, exports):
    global calc_path
    logging.basicConfig(level=getattr(logging, loglevel.upper()))
    job_inis = job_ini.split(',')
    assert len(job_inis) in (1, 2), job_inis
    monitor = performance.PerformanceMonitor(
        'total runtime', measuremem=True)

    if len(job_inis) == 1:  # run hazard or risk
        oqparam = readinput.get_oqparam(job_inis[0], hc_id=hc)
        if hc and hc < 0:  # interpret negative calculation ids
            calc_ids = datastore.get_calc_ids()
            try:
                hc = calc_ids[hc]
            except IndexError:
                raise SystemExit('There are %d old calculations, cannot '
                                 'retrieve the %s' % (len(calc_ids), hc))
        calc = base.calculators(oqparam, monitor)
        with monitor:
            calc.run(concurrent_tasks=concurrent_tasks, pdb=pdb,
                     exports=exports, hazard_calculation_id=hc)
    else:  # run hazard + risk
        calc = run2(
            job_inis[0], job_inis[1], concurrent_tasks, pdb, exports, monitor)

    logging.info('Total time spent: %s s', monitor.duration)
    logging.info('Memory allocated: %s', general.humansize(monitor.mem))
    monitor.flush()
    print('See the output with hdfview %s' % calc.datastore.hdf5path)
    calc_path = calc.datastore.calc_dir  # used to deduce the .pstat filename
    return calc
Example #16
def run_calc(job_id, oqparam, exports, log_level='info', log_file=None, **kw):
    """
    Run a calculation.

    :param job_id:
        ID of the current job
    :param oqparam:
        :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param exports:
        A comma-separated string of export types.
    """
    register_signals()
    setproctitle('oq-job-%d' % job_id)
    logs.init(job_id, getattr(logging, log_level.upper()))
    with logs.handle(job_id, log_level, log_file):
        calc = base.calculators(oqparam, calc_id=job_id)
        logging.info('%s running %s [--hc=%s]', getpass.getuser(),
                     calc.oqparam.inputs['job_ini'],
                     calc.oqparam.hazard_calculation_id)
        logging.info('Using engine version %s', __version__)
        msg = check_obsolete_version(oqparam.calculation_mode)
        if msg:
            logging.warning(msg)
        calc.from_engine = True
        tb = 'None\n'
        try:
            if OQ_DISTRIBUTE.endswith('pool'):
                logging.warning('Using %d cores on %s', parallel.CT // 2,
                                platform.node())
            set_concurrent_tasks_default(calc)
            t0 = time.time()
            calc.run(exports=exports, **kw)
            logging.info('Exposing the outputs to the database')
            expose_outputs(calc.datastore)
            path = calc.datastore.filename
            size = general.humansize(os.path.getsize(path))
            logging.info('Stored %s on %s in %d seconds', size, path,
                         time.time() - t0)
            logs.dbcmd('finish', job_id, 'complete')
            calc.datastore.close()
            for line in logs.dbcmd('list_outputs', job_id, False):
                general.safeprint(line)
        except BaseException as exc:
            if isinstance(exc, MasterKilled):
                msg = 'aborted'
            else:
                msg = 'failed'
            tb = traceback.format_exc()
            try:
                logging.critical(tb)
                logs.dbcmd('finish', job_id, msg)
            except BaseException:  # an OperationalError may always happen
                sys.stderr.write(tb)
            raise
        finally:
            parallel.Starmap.shutdown()
    # sanity check to make sure that the logging on file is working
    if log_file and log_file != os.devnull and os.path.getsize(log_file) == 0:
        logging.warning('The log file %s is empty!?' % log_file)
    return calc
Example #17
def build_report(job_ini, output_dir=None):
    """
    Write a `report.rst` file with information about the calculation
    without running it

    :param job_ini:
        full pathname of the job.ini file
    :param output_dir:
        the directory where the report is written (default the input directory)
    """
    calc_id = logs.init()
    oq = readinput.get_oqparam(job_ini)
    if oq.calculation_mode == 'classical':
        oq.calculation_mode = 'preclassical'
    oq.ground_motion_fields = False
    output_dir = output_dir or os.path.dirname(job_ini)
    from openquake.calculators import base  # ugly
    calc = base.calculators(oq, calc_id)
    calc.save_params()  # needed to save oqparam

    # some care is taken so that the real calculation is not run:
    # the goal is to extract information about the source management only
    calc.pre_execute()
    if oq.calculation_mode == 'preclassical':
        calc.execute()
    rw = ReportWriter(calc.datastore)
    rw.make_report()
    report = (os.path.join(output_dir, 'report.rst') if output_dir
              else calc.datastore.export_path('report.rst'))
    try:
        rw.save(report)
    except IOError as exc:  # permission error
        sys.stderr.write(str(exc) + '\n')
    readinput.exposure = None  # ugly hack
    return report
Example #18
def run2(job_haz, job_risk, concurrent_tasks, pdb, exports, monitor):
    """
    Run both hazard and risk, one after the other
    """
    hcalc = base.calculators(readinput.get_oqparam(job_haz), monitor)
    with monitor:
        monitor.monitor_dir = hcalc.datastore.calc_dir
        hcalc.run(concurrent_tasks=concurrent_tasks, pdb=pdb, exports=exports)
        hc_id = hcalc.datastore.calc_id
        oq = readinput.get_oqparam(job_risk, hc_id=hc_id)
        rcalc = base.calculators(oq, monitor)
        monitor.monitor_dir = rcalc.datastore.calc_dir
        rcalc.run(concurrent_tasks=concurrent_tasks,
                  pdb=pdb,
                  exports=exports,
                  hazard_calculation_id=hc_id)
    return rcalc
Example #19
def main(job_ini_or_zip_or_nrmls):
    """
    Check the validity of job.ini files, job.zip files and .xml files.
    NB: `oq check_input job_haz.ini job_risk.ini` is special-cased so
    that the risk files are checked before the hazard files.
    """
    if os.environ.get('OQ_DISTRIBUTE') not in ('no', 'processpool'):
        os.environ['OQ_DISTRIBUTE'] = 'processpool'
    all_inis = all(f.endswith('.ini') for f in job_ini_or_zip_or_nrmls)
    if all_inis:  # the typical case is job_haz.ini + job_risk.ini
        dic = {}
        for ini in job_ini_or_zip_or_nrmls:
            for key, val in readinput.get_params(ini).items():
                if key == 'inputs' and key in dic:
                    dic[key].update(val)
                else:  # the last wins
                    dic[key] = val
        with logs.init('job', dic) as log:
            logging.info('Running oq check_input %s',
                         ' '.join(job_ini_or_zip_or_nrmls))
            calc = base.calculators(log.get_oqparam(), log.calc_id)
            base.BaseCalculator.gzip_inputs = lambda self: None  # disable
            with mock.patch.dict(os.environ, {'OQ_CHECK_INPUT': '1'}):
                calc.read_inputs()
        return

    for job_ini_or_zip_or_nrml in job_ini_or_zip_or_nrmls:
        if job_ini_or_zip_or_nrml.endswith('.xml'):
            try:
                node = nrml.to_python(job_ini_or_zip_or_nrml)
                if node.tag.endswith('exposureModel'):
                    err = Exposure.check(job_ini_or_zip_or_nrml)
                    if err:
                        logging.warning(err)
                else:
                    logging.info('Checked %s', job_ini_or_zip_or_nrml)
            except Exception as exc:
                sys.exit(exc)
        else:  # .zip
            with logs.init('job', job_ini_or_zip_or_nrml) as log:
                path = os.path.abspath(job_ini_or_zip_or_nrml)
                logging.info('Running oq check_input %s', path)
                calc = base.calculators(log.get_oqparam(), log.calc_id)
                base.BaseCalculator.gzip_inputs = lambda self: None  # disable
                with mock.patch.dict(os.environ, {'OQ_CHECK_INPUT': '1'}):
                    calc.read_inputs()
Example #20
def run2(job_haz, job_risk, calc_id, concurrent_tasks, pdb, reuse_input,
         loglevel, exports, params):
    """
    Run both hazard and risk, one after the other
    """
    oq = readinput.get_oqparam(job_haz, kw=params)
    hcalc = base.calculators(oq, calc_id)
    hcalc.run(concurrent_tasks=concurrent_tasks, pdb=pdb, exports=exports)
    hcalc.datastore.close()
    hc_id = hcalc.datastore.calc_id
    rcalc_id = logs.init(level=getattr(logging, loglevel.upper()))
    params['hazard_calculation_id'] = str(hc_id)
    oq = readinput.get_oqparam(job_risk, kw=params)
    rcalc = base.calculators(oq, rcalc_id)
    if reuse_input:  # enable caching
        oq.cachedir = datastore.get_datadir()
    rcalc.run(pdb=pdb, exports=exports)
    return rcalc
Example #21
def run_calc(job_id, oqparam, log_level, log_file, exports,
             hazard_calculation_id=None):
    """
    Run a calculation.

    :param job_id:
        ID of the current job
    :param oqparam:
        :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param str log_level:
        The desired logging level. Valid choices are 'debug', 'info',
        'progress', 'warn', 'error', and 'critical'.
    :param str log_file:
        Complete path (including file name) to file where logs will be written.
        If `None`, logging will just be printed to standard output.
    :param exports:
        A comma-separated string of export types.
    """
    monitor = Monitor('total runtime', measuremem=True)
    with logs.handle(job_id, log_level, log_file):  # run the job
        if USE_CELERY and os.environ.get('OQ_DISTRIBUTE') == 'celery':
            set_concurrent_tasks_default()
        calc = base.calculators(oqparam, monitor, calc_id=job_id)
        tb = 'None\n'
        try:
            logs.dbcmd('set_status', job_id, 'executing')
            _do_run_calc(calc, exports, hazard_calculation_id)
            expose_outputs(calc.datastore)
            records = views.performance_view(calc.datastore)
            logs.dbcmd('save_performance', job_id, records)
            calc.datastore.close()
            logs.LOG.info('Calculation %d finished correctly in %d seconds',
                          job_id, calc.monitor.duration)
            logs.dbcmd('finish', job_id, 'complete')
        except:
            tb = traceback.format_exc()
            try:
                logs.LOG.critical(tb)
                logs.dbcmd('finish', job_id, 'failed')
            except:  # an OperationalError may always happen
                sys.stderr.write(tb)
            raise
        finally:
            # if there was an error in the calculation, this part may fail;
            # in such a situation, we simply log the cleanup error without
            # taking further action, so that the real error can propagate
            try:
                if USE_CELERY:
                    celery_cleanup(TERMINATE, parallel.TaskManager.task_ids)
            except:
                # log the finalization error only if there is no real error
                if tb == 'None\n':
                    logs.LOG.error('finalizing', exc_info=True)
    return calc
Example #22
def run2(job_haz, job_risk, concurrent_tasks, pdb, exports, params, monitor):
    """
    Run both hazard and risk, one after the other
    """
    hcalc = base.calculators(readinput.get_oqparam(job_haz), monitor)
    with monitor:
        hcalc.run(concurrent_tasks=concurrent_tasks,
                  pdb=pdb,
                  exports=exports,
                  **params)
        hc_id = hcalc.datastore.calc_id
        oq = readinput.get_oqparam(job_risk, hc_id=hc_id)
    rcalc = base.calculators(oq)
    with rcalc._monitor:
        # disable concurrency in the second calculation to avoid fork issues
        rcalc.run(concurrent_tasks=0,
                  pdb=pdb,
                  exports=exports,
                  hazard_calculation_id=hc_id,
                  **params)
    return rcalc
Example #23
    def get_calc(self, testfile, job_ini, **kw):
        """
        Return the calculator for the given test file and job_ini
        """
        self.testdir = os.path.dirname(testfile) if os.path.isfile(testfile) \
            else testfile
        inis = [os.path.join(self.testdir, ini) for ini in job_ini.split(",")]
        params = readinput.get_params(inis)
        params.update(kw)
        oq = oqvalidation.OqParam(**params)
        oq.validate()
        # change this when debugging the test
        monitor = PerformanceMonitor(self.testdir)
        return base.calculators(oq, monitor)
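
A hypothetical test method built on the get_calc() above, assuming a unittest.TestCase subclass with a job.ini next to the test file; keyword overrides passed to get_calc end up as OqParam attributes.

    def test_smoke(self):
        calc = self.get_calc(__file__, 'job.ini')
        calc.run()
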
Example #24
    def get_calc(self, testfile, job_ini, **kw):
        """
        Return the calculator for the given test file and job_ini
        """
        self.testdir = os.path.dirname(testfile) if os.path.isfile(testfile) \
            else testfile
        params = readinput.get_params(os.path.join(self.testdir, job_ini), kw)
        oqvalidation.OqParam.calculation_mode.validator.choices = tuple(
            base.calculators)
        oq = oqvalidation.OqParam(**params)
        oq.validate()
        # change this when debugging the test
        log = logs.init('calc', params)
        return base.calculators(oq, log.calc_id)
Example #25
def validate_zip(request):
    """
    Leverage the engine libraries to check if a given zip archive is a valid
    calculation input

    :param request:
        a `django.http.HttpRequest` object containing a zip archive

    :returns: a JSON object, containing:
        * 'valid': a boolean indicating if the provided archive is valid
        * 'error_msg': the error message, if any error was found
                       (None otherwise)
    """
    archive = request.FILES.get('archive')
    if not archive:
        return HttpResponseBadRequest('Missing archive file')
    job_zip = archive.temporary_file_path()
    try:
        base.calculators(readinput.get_oqparam(job_zip)).read_inputs()
    except Exception as exc:
        return _make_response(str(exc), None, valid=False)
    else:
        return _make_response(None, None, valid=True)
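
The same check can be sketched outside Django, which makes the contract of validate_zip clearer; the imports are as in the other examples on this page and the helper name is hypothetical.

from openquake.commonlib import readinput
from openquake.calculators import base

def is_valid_job_zip(job_zip):
    """Return (valid, error_msg) for a job archive, mirroring validate_zip."""
    try:
        base.calculators(readinput.get_oqparam(job_zip)).read_inputs()
    except Exception as exc:
        return False, str(exc)
    return True, None
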
Example #26
    def get_calc(self, testfile, job_ini, **kw):
        """
        Return the calculator for the given test file and job_ini
        """
        self.testdir = os.path.dirname(testfile) if os.path.isfile(testfile) \
            else testfile
        inis = [os.path.join(self.testdir, ini) for ini in job_ini.split(',')]
        params = readinput.get_params(inis)
        params.update(kw)
        oq = oqvalidation.OqParam(**params)
        oq.validate()
        # change this when debugging the test
        monitor = Monitor(self.testdir)
        return base.calculators(oq, monitor)
Example #27
    def get_calc(self, testfile, job_ini, **kw):
        """
        Return the calculator for the given test file and job_ini
        """
        self.testdir = os.path.dirname(testfile) if os.path.isfile(testfile) \
            else testfile
        inis = [os.path.join(self.testdir, ini) for ini in job_ini.split(',')]
        params = readinput.get_params(inis, **kw)

        oqvalidation.OqParam.calculation_mode.validator.choices = tuple(
            base.calculators)
        oq = oqvalidation.OqParam(**params)
        oq.validate()
        # change this when debugging the test
        return base.calculators(oq)
Example #28
def _run(job_inis, concurrent_tasks, calc_id, pdb, loglevel, hc, exports,
         params):
    global calc_path
    assert len(job_inis) in (1, 2), job_inis
    # set the logs first of all
    calc_id = logs.init(calc_id, getattr(logging, loglevel.upper()))
    # disable gzip_input
    base.BaseCalculator.gzip_inputs = lambda self: None
    with performance.Monitor('total runtime', measuremem=True) as monitor:
        if os.environ.get('OQ_DISTRIBUTE') not in ('no', 'processpool'):
            os.environ['OQ_DISTRIBUTE'] = 'processpool'
        if len(job_inis) == 1:  # run hazard or risk
            if hc:
                hc_id = hc[0]
                rlz_ids = hc[1:]
            else:
                hc_id = None
                rlz_ids = ()
            oqparam = readinput.get_oqparam(job_inis[0], hc_id=hc_id)
            if not oqparam.cachedir:  # enable caching
                oqparam.cachedir = datastore.get_datadir()
            if hc_id and hc_id < 0:  # interpret negative calculation ids
                calc_ids = datastore.get_calc_ids()
                try:
                    hc_id = calc_ids[hc_id]
                except IndexError:
                    raise SystemExit('There are %d old calculations, cannot '
                                     'retrieve the %s' %
                                     (len(calc_ids), hc_id))
            calc = base.calculators(oqparam, calc_id)
            calc.run(concurrent_tasks=concurrent_tasks,
                     pdb=pdb,
                     exports=exports,
                     hazard_calculation_id=hc_id,
                     rlz_ids=rlz_ids,
                     **params)
        else:  # run hazard + risk
            calc = run2(job_inis[0], job_inis[1], calc_id, concurrent_tasks,
                        pdb, loglevel, exports, params)

    logging.info('Total time spent: %s s', monitor.duration)
    logging.info('Memory allocated: %s', general.humansize(monitor.mem))
    print('See the output with silx view %s' % calc.datastore.filename)
    calc_path, _ = os.path.splitext(calc.datastore.filename)  # used below
    return calc
Example #29
    def get_calc(self, testfile, job_ini, **kw):
        """
        Return the calculator for the given test file and job_ini
        """
        self.testdir = os.path.dirname(testfile) if os.path.isfile(testfile) \
            else testfile
        inis = [os.path.join(self.testdir, ini) for ini in job_ini.split(',')]
        inputs = {k[:-5]: kw.pop(k) for k in list(kw) if k.endswith('_file')}
        params = readinput.get_params(inis, **inputs)
        params.update(kw)

        oqvalidation.OqParam.calculation_mode.validator.choices = tuple(
            base.calculators)
        oq = oqvalidation.OqParam(**params)
        oq.validate()
        # change this when debugging the test
        monitor = performance.Monitor(self.testdir)
        return base.calculators(oq, monitor)
Example #30
def run_calc(log):
    """
    Run a calculation.

    :param log:
        LogContext of the current job
    """
    register_signals()
    setproctitle('oq-job-%d' % log.calc_id)
    with log:
        oqparam = log.get_oqparam()
        calc = base.calculators(oqparam, log.calc_id)
        logging.info('%s running %s [--hc=%s]',
                     getpass.getuser(),
                     calc.oqparam.inputs['job_ini'],
                     calc.oqparam.hazard_calculation_id)
        logging.info('Using engine version %s', __version__)
        msg = check_obsolete_version(oqparam.calculation_mode)
        # NB: disabling the warning should be done only for users with
        # an updated LTS version, but we are doing it for all users
        # if msg:
        #    logging.warning(msg)
        calc.from_engine = True
        if config.zworkers['host_cores']:
            set_concurrent_tasks_default(calc)
        else:
            logging.warning('Assuming %d %s workers',
                            parallel.Starmap.num_cores, OQ_DISTRIBUTE)
        t0 = time.time()
        calc.run()
        logging.info('Exposing the outputs to the database')
        expose_outputs(calc.datastore)
        path = calc.datastore.filename
        size = general.humansize(getsize(path))
        logging.info('Stored %s on %s in %d seconds',
                     size, path, time.time() - t0)
        calc.datastore.close()
        for line in logs.dbcmd('list_outputs', log.calc_id, False):
            general.safeprint(line)
        # sanity check to make sure that the logging on file is working
        if (log.log_file and log.log_file != os.devnull and
                getsize(log.log_file) == 0):
            logging.warning('The log file %s is empty!?' % log.log_file)
    return calc
Example #31
def build_report(job_ini, output_dir=None):
    """
    Write a `report.rst` file with information about the calculation.

    :param job_ini:
        full pathname of the job.ini file
    :param output_dir:
        the directory where the report is written (default the input directory)
    """
    oq = readinput.get_oqparam(job_ini)
    output_dir = output_dir or os.path.dirname(job_ini)
    calc = base.calculators(oq)
    calc.pre_execute()
    calc.save_params()
    ds = datastore.DataStore(calc.datastore.calc_id)
    rw = ReportWriter(ds)
    rw.make_report()
    report = os.path.join(output_dir, 'report.rst')
    rw.save(report)
    return report
Example #32
def run(job_ini,
        concurrent_tasks=None,
        pdb=None,
        loglevel='info',
        hc=None,
        exports=''):
    """
    Run a calculation. Optionally, set the number of concurrent_tasks
    (0 to disable the parallelization).
    """
    logging.basicConfig(level=getattr(logging, loglevel.upper()))
    job_inis = job_ini.split(',')
    assert len(job_inis) in (1, 2), job_inis
    monitor = performance.PerformanceMonitor('total', measuremem=True)

    if len(job_inis) == 1:  # run hazard or risk
        oqparam = readinput.get_oqparam(job_inis[0], hc_id=hc)
        if hc and hc < 0:  # interpret negative calculation ids
            calc_ids = datastore.get_calc_ids()
            try:
                hc = calc_ids[hc]
            except IndexError:
                raise SystemExit('There are %d old calculations, cannot '
                                 'retrieve the %s' % (len(calc_ids), hc))
        calc = base.calculators(oqparam, monitor)
        monitor.monitor_dir = calc.datastore.calc_dir
        with monitor:
            calc.run(concurrent_tasks=concurrent_tasks,
                     pdb=pdb,
                     exports=exports,
                     hazard_calculation_id=hc)
    else:  # run hazard + risk
        calc = run2(job_inis[0], job_inis[1], concurrent_tasks, pdb, exports,
                    monitor)

    logging.info('Total time spent: %s s', monitor.duration)
    logging.info('Memory allocated: %s', general.humansize(monitor.mem))
    monitor.flush()
    print('See the output with hdfview %s/output.hdf5' %
          calc.datastore.calc_dir)
    return calc
Example #33
def main(job_ini_or_zip_or_nrmls):
    if os.environ.get('OQ_DISTRIBUTE') not in ('no', 'processpool'):
        os.environ['OQ_DISTRIBUTE'] = 'processpool'
    for job_ini_or_zip_or_nrml in job_ini_or_zip_or_nrmls:
        if job_ini_or_zip_or_nrml.endswith('.xml'):
            try:
                node = nrml.to_python(job_ini_or_zip_or_nrml)
                if node.tag.endswith('exposureModel'):
                    err = Exposure.check(job_ini_or_zip_or_nrml)
                    if err:
                        logging.warning(err)
                else:
                    logging.info('Checked %s', job_ini_or_zip_or_nrml)
            except Exception as exc:
                sys.exit(exc)
        else:
            with logs.init('calc', job_ini_or_zip_or_nrml) as log:
                calc = base.calculators(log.get_oqparam(), log.calc_id)
                base.BaseCalculator.gzip_inputs = lambda self: None  # disable
                with mock.patch.dict(os.environ, {'OQ_CHECK_INPUT': '1'}):
                    calc.read_inputs()
Example #34
def build_report(job_ini, output_dir=None):
    """
    Write a `report.rst` file with information about the calculation
    without running it

    :param job_ini:
        full pathname of the job.ini file
    :param output_dir:
        the directory where the report is written (default the input directory)
    """
    calc_id = logs.init()
    oq = readinput.get_oqparam(job_ini)
    if 'source_model_logic_tree' in oq.inputs:
        oq.calculation_mode = 'preclassical'
    oq.ground_motion_fields = False
    output_dir = output_dir or os.path.dirname(job_ini)
    from openquake.calculators import base  # ugly
    calc = base.calculators(oq, calc_id)
    calc.save_params()  # needed to save oqparam

    # some care is taken so that the real calculation is not run:
    # the goal is to extract information about the source management only
    calc.pre_execute()
    if oq.calculation_mode == 'preclassical':
        calc.execute()
    logging.info('Making the .rst report')
    rw = ReportWriter(calc.datastore)
    try:
        rw.make_report()
    finally:
        parallel.Starmap.shutdown()
    report = (os.path.join(output_dir, 'report.rst')
              if output_dir else calc.datastore.export_path('report.rst'))
    try:
        rw.save(report)
    except IOError as exc:  # permission error
        sys.stderr.write(str(exc) + '\n')
    readinput.exposure = None  # ugly hack
    return report
Example #35
def _run(job_ini, concurrent_tasks, pdb, loglevel, hc, exports, params):
    global calc_path
    logging.basicConfig(level=getattr(logging, loglevel.upper()))
    job_inis = job_ini.split(',')
    assert len(job_inis) in (1, 2), job_inis
    monitor = performance.Monitor(
        'total runtime', measuremem=True)
    if len(job_inis) == 1:  # run hazard or risk
        if hc:
            hc_id = hc[0]
            rlz_ids = hc[1:]
        else:
            hc_id = None
            rlz_ids = ()
        oqparam = readinput.get_oqparam(job_inis[0], hc_id=hc_id)
        if hc_id and hc_id < 0:  # interpret negative calculation ids
            calc_ids = datastore.get_calc_ids()
            try:
                hc_id = calc_ids[hc_id]
            except IndexError:
                raise SystemExit(
                    'There are %d old calculations, cannot '
                    'retrieve the %s' % (len(calc_ids), hc_id))
        calc = base.calculators(oqparam, monitor)
        with calc.monitor:
            calc.run(concurrent_tasks=concurrent_tasks, pdb=pdb,
                     exports=exports, hazard_calculation_id=hc_id,
                     rlz_ids=rlz_ids, **params)
    else:  # run hazard + risk
        calc = run2(
            job_inis[0], job_inis[1], concurrent_tasks, pdb,
            exports, params, monitor)

    logging.info('Total time spent: %s s', monitor.duration)
    logging.info('Memory allocated: %s', general.humansize(monitor.mem))
    monitor.flush()
    print('See the output with hdfview %s' % calc.datastore.hdf5path)
    calc_path = calc.datastore.calc_dir  # used for the .pstat filename
    return calc
Example #36
def _run(job_inis, concurrent_tasks, pdb, loglevel, hc, exports, params):
    global calc_path
    assert len(job_inis) in (1, 2), job_inis
    # set the logs first of all
    calc_id = logs.init(level=getattr(logging, loglevel.upper()))
    with performance.Monitor('total runtime', measuremem=True) as monitor:
        if len(job_inis) == 1:  # run hazard or risk
            if hc:
                hc_id = hc[0]
                rlz_ids = hc[1:]
            else:
                hc_id = None
                rlz_ids = ()
            oqparam = readinput.get_oqparam(job_inis[0], hc_id=hc_id)
            vars(oqparam).update(params)
            if hc_id and hc_id < 0:  # interpret negative calculation ids
                calc_ids = datastore.get_calc_ids()
                try:
                    hc_id = calc_ids[hc_id]
                except IndexError:
                    raise SystemExit('There are %d old calculations, cannot '
                                     'retrieve the %s' %
                                     (len(calc_ids), hc_id))
            calc = base.calculators(oqparam, calc_id)
            calc.run(concurrent_tasks=concurrent_tasks,
                     pdb=pdb,
                     exports=exports,
                     hazard_calculation_id=hc_id,
                     rlz_ids=rlz_ids)
        else:  # run hazard + risk
            calc = run2(job_inis[0], job_inis[1], calc_id, concurrent_tasks,
                        pdb, loglevel, exports, params)

    logging.info('Total time spent: %s s', monitor.duration)
    logging.info('Memory allocated: %s', general.humansize(monitor.mem))
    print('See the output with hdfview %s' % calc.datastore.hdf5path)
    calc_path, _ = os.path.splitext(calc.datastore.hdf5path)  # used below
    return calc
Example #37
def _run(job_inis, concurrent_tasks, pdb, loglevel, hc, exports, params):
    global calc_path
    assert len(job_inis) in (1, 2), job_inis
    # set the logs first of all
    calc_id = logs.init(level=getattr(logging, loglevel.upper()))
    with performance.Monitor('total runtime', measuremem=True) as monitor:
        if len(job_inis) == 1:  # run hazard or risk
            if hc:
                hc_id = hc[0]
                rlz_ids = hc[1:]
            else:
                hc_id = None
                rlz_ids = ()
            oqparam = readinput.get_oqparam(job_inis[0], hc_id=hc_id)
            vars(oqparam).update(params)
            if hc_id and hc_id < 0:  # interpret negative calculation ids
                calc_ids = datastore.get_calc_ids()
                try:
                    hc_id = calc_ids[hc_id]
                except IndexError:
                    raise SystemExit(
                        'There are %d old calculations, cannot '
                        'retrieve the %s' % (len(calc_ids), hc_id))
            calc = base.calculators(oqparam, calc_id)
            calc.run(concurrent_tasks=concurrent_tasks, pdb=pdb,
                     exports=exports, hazard_calculation_id=hc_id,
                     rlz_ids=rlz_ids)
        else:  # run hazard + risk
            calc = run2(
                job_inis[0], job_inis[1], calc_id, concurrent_tasks, pdb,
                loglevel, exports, params)

    logging.info('Total time spent: %s s', monitor.duration)
    logging.info('Memory allocated: %s', general.humansize(monitor.mem))
    print('See the output with silx view %s' % calc.datastore.filename)
    calc_path, _ = os.path.splitext(calc.datastore.filename)  # used below
    return calc
Example #38
def build_report(job_ini, output_dir=None):
    """
    Write a `report.rst` file with information about the calculation.

    :param job_ini:
        full pathname of the job.ini file
    :param output_dir:
        the directory where the report is written (default the input directory)
    """
    oq = readinput.get_oqparam(job_ini)
    output_dir = output_dir or os.path.dirname(job_ini)
    calc = base.calculators(oq)
    calc.pre_execute()
    calc.save_params()
    ds = datastore.DataStore(calc.datastore.calc_id)
    rw = ReportWriter(ds)
    rw.make_report()
    report = (os.path.join(output_dir, 'report.rst') if output_dir
              else ds.export_path('report.rst'))
    try:
        rw.save(report)
    except IOError as exc:  # permission error
        sys.stderr.write(str(exc) + '\n')
    return report
Example #39
def run_calc(job_id, oqparam, exports, hazard_calculation_id=None, **kw):
    """
    Run a calculation.

    :param job_id:
        ID of the current job
    :param oqparam:
        :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param exports:
        A comma-separated string of export types.
    """
    register_signals()
    setproctitle('oq-job-%d' % job_id)
    calc = base.calculators(oqparam, calc_id=job_id)
    logging.info('%s running %s [--hc=%s]',
                 getpass.getuser(),
                 calc.oqparam.inputs['job_ini'],
                 calc.oqparam.hazard_calculation_id)
    logging.info('Using engine version %s', __version__)
    msg = check_obsolete_version(oqparam.calculation_mode)
    if msg:
        logs.LOG.warn(msg)
    calc.from_engine = True
    tb = 'None\n'
    try:
        if not oqparam.hazard_calculation_id:
            if 'input_zip' in oqparam.inputs:  # starting from an archive
                with open(oqparam.inputs['input_zip'], 'rb') as arch:
                    data = numpy.array(arch.read())
            else:
                logs.LOG.info('Zipping the input files')
                bio = io.BytesIO()
                oqzip.zip_job(oqparam.inputs['job_ini'], bio, (), oqparam,
                              logging.debug)
                data = numpy.array(bio.getvalue())
                del bio
            calc.datastore['input/zip'] = data
            calc.datastore.set_attrs('input/zip', nbytes=data.nbytes)
            del data  # save memory

        poll_queue(job_id, _PID, poll_time=15)
        if OQ_DISTRIBUTE.endswith('pool'):
            logs.LOG.warning('Using %d cores on %s',
                             parallel.cpu_count, platform.node())
        if OQ_DISTRIBUTE == 'zmq':
            logs.dbcmd('zmq_start')  # start zworkers
            logs.dbcmd('zmq_wait')  # wait for them to go up
        if OQ_DISTRIBUTE.startswith(('celery', 'zmq')):
            set_concurrent_tasks_default(job_id)
        t0 = time.time()
        calc.run(exports=exports,
                 hazard_calculation_id=hazard_calculation_id,
                 close=False, **kw)
        logs.LOG.info('Exposing the outputs to the database')
        expose_outputs(calc.datastore)
        duration = time.time() - t0
        records = views.performance_view(calc.datastore, add_calc_id=False)
        logs.dbcmd('save_performance', job_id, records)
        calc.datastore.close()
        logs.LOG.info('Calculation %d finished correctly in %d seconds',
                      job_id, duration)
        logs.dbcmd('finish', job_id, 'complete')
    except BaseException as exc:
        if isinstance(exc, MasterKilled):
            msg = 'aborted'
        else:
            msg = 'failed'
        tb = traceback.format_exc()
        try:
            logs.LOG.critical(tb)
            logs.dbcmd('finish', job_id, msg)
        except BaseException:  # an OperationalError may always happen
            sys.stderr.write(tb)
        raise
    finally:
        # if there was an error in the calculation, this part may fail;
        # in such a situation, we simply log the cleanup error without
        # taking further action, so that the real error can propagate
        if OQ_DISTRIBUTE == 'zmq':  # stop zworkers
            logs.dbcmd('zmq_stop')
        try:
            if OQ_DISTRIBUTE.startswith('celery'):
                celery_cleanup(TERMINATE)
        except BaseException:
            # log the finalization error only if there is no real error
            if tb == 'None\n':
                logs.LOG.error('finalizing', exc_info=True)
    return calc
Example #40
    def get_calc(self, job_ini):
        log = logs.init("job", job_ini)
        log.__enter__()
        return calculators(log.get_oqparam(), log.calc_id)
Example #41
def run_calc(job_id, oqparam, exports, hazard_calculation_id=None, **kw):
    """
    Run a calculation.

    :param job_id:
        ID of the current job
    :param oqparam:
        :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param exports:
        A comma-separated string of export types.
    """
    register_signals()
    setproctitle('oq-job-%d' % job_id)
    calc = base.calculators(oqparam, calc_id=job_id)
    logging.info('%s running %s [--hc=%s]',
                 getpass.getuser(),
                 calc.oqparam.inputs['job_ini'],
                 calc.oqparam.hazard_calculation_id)
    logging.info('Using engine version %s', __version__)
    msg = check_obsolete_version(oqparam.calculation_mode)
    if msg:
        logs.LOG.warn(msg)
    if OQ_DISTRIBUTE.startswith(('celery', 'zmq')):
        set_concurrent_tasks_default(job_id)
    calc.from_engine = True
    tb = 'None\n'
    try:
        if not oqparam.hazard_calculation_id:
            if 'input_zip' in oqparam.inputs:  # starting from an archive
                with open(oqparam.inputs['input_zip'], 'rb') as arch:
                    data = numpy.array(arch.read())
            else:
                logs.LOG.info('zipping the input files')
                bio = io.BytesIO()
                oqzip.zip_job(oqparam.inputs['job_ini'], bio, (), oqparam,
                              logging.debug)
                data = numpy.array(bio.getvalue())
                del bio
            calc.datastore['input/zip'] = data
            calc.datastore.set_attrs('input/zip', nbytes=data.nbytes)
            del data  # save memory

        poll_queue(job_id, _PID, poll_time=15)
        t0 = time.time()
        calc.run(exports=exports,
                 hazard_calculation_id=hazard_calculation_id,
                 close=False, **kw)
        logs.LOG.info('Exposing the outputs to the database')
        expose_outputs(calc.datastore)
        duration = time.time() - t0
        calc._monitor.flush()
        records = views.performance_view(calc.datastore)
        logs.dbcmd('save_performance', job_id, records)
        calc.datastore.close()
        logs.LOG.info('Calculation %d finished correctly in %d seconds',
                      job_id, duration)
        logs.dbcmd('finish', job_id, 'complete')
    except BaseException as exc:
        if isinstance(exc, MasterKilled):
            msg = 'aborted'
        else:
            msg = 'failed'
        tb = traceback.format_exc()
        try:
            logs.LOG.critical(tb)
            logs.dbcmd('finish', job_id, msg)
        except BaseException:  # an OperationalError may always happen
            sys.stderr.write(tb)
        raise
    finally:
        # if there was an error in the calculation, this part may fail;
        # in such a situation, we simply log the cleanup error without
        # taking further action, so that the real error can propagate
        try:
            if OQ_DISTRIBUTE.startswith('celery'):
                celery_cleanup(TERMINATE)
        except BaseException:
            # log the finalization error only if there is no real error
            if tb == 'None\n':
                logs.LOG.error('finalizing', exc_info=True)
    return calc
Example #42
def run_calc(job_id, oqparam, exports, hazard_calculation_id=None, **kw):
    """
    Run a calculation.

    :param job_id:
        ID of the current job
    :param oqparam:
        :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param exports:
        A comma-separated string of export types.
    """
    register_signals()
    setproctitle('oq-job-%d' % job_id)
    calc = base.calculators(oqparam, calc_id=job_id)
    logging.info('%s running %s [--hc=%s]', getpass.getuser(),
                 calc.oqparam.inputs['job_ini'],
                 calc.oqparam.hazard_calculation_id)
    logging.info('Using engine version %s', __version__)
    msg = check_obsolete_version(oqparam.calculation_mode)
    if msg:
        logs.LOG.warn(msg)
    calc.from_engine = True
    tb = 'None\n'
    try:
        poll_queue(job_id, _PID, poll_time=15)
    except BaseException:
        # the job aborted even before starting
        logs.dbcmd('finish', job_id, 'aborted')
        return
    try:
        if OQ_DISTRIBUTE.endswith('pool'):
            logs.LOG.warning('Using %d cores on %s',
                             parallel.Starmap.num_cores, platform.node())
        if OQ_DISTRIBUTE == 'zmq' and config.zworkers['host_cores']:
            logs.dbcmd('zmq_start')  # start the zworkers
            logs.dbcmd('zmq_wait')  # wait for them to go up
        set_concurrent_tasks_default(calc)
        t0 = time.time()
        calc.run(exports=exports,
                 hazard_calculation_id=hazard_calculation_id,
                 **kw)
        logs.LOG.info('Exposing the outputs to the database')
        expose_outputs(calc.datastore)
        duration = time.time() - t0
        records = views.performance_view(calc.datastore, add_calc_id=False)
        logs.dbcmd('save_performance', job_id, records)
        calc.datastore.close()
        logs.LOG.info('Calculation %d finished correctly in %d seconds',
                      job_id, duration)
        logs.dbcmd('finish', job_id, 'complete')
    except BaseException as exc:
        if isinstance(exc, MasterKilled):
            msg = 'aborted'
        else:
            msg = 'failed'
        tb = traceback.format_exc()
        try:
            logs.LOG.critical(tb)
            logs.dbcmd('finish', job_id, msg)
        except BaseException:  # an OperationalError may always happen
            sys.stderr.write(tb)
        raise
    finally:
        # if there was an error in the calculation, this part may fail;
        # in such a situation, we simply log the cleanup error without
        # taking further action, so that the real error can propagate
        if OQ_DISTRIBUTE == 'zmq' and config.zworkers['host_cores']:
            logs.dbcmd('zmq_stop')  # stop the zworkers
        try:
            if OQ_DISTRIBUTE.startswith('celery'):
                celery_cleanup(TERMINATE)
        except BaseException:
            # log the finalization error only if there is no real error
            if tb == 'None\n':
                logs.LOG.error('finalizing', exc_info=True)
    return calc