Example #1
def run2(job_haz, job_risk, concurrent_tasks, exports, monitor):
    """
    Run both hazard and risk, one after the other
    """
    hcalc = base.calculators(readinput.get_oqparam(job_haz), monitor)
    with monitor:
        monitor.monitor_dir = hcalc.datastore.calc_dir
        hcalc.run(concurrent_tasks=concurrent_tasks, exports=exports)
        hc_id = hcalc.datastore.calc_id
        oq = readinput.get_oqparam(job_risk, hc_id=hc_id)
        rcalc = base.calculators(oq, monitor)
        monitor.monitor_dir = rcalc.datastore.calc_dir
        rcalc.run(concurrent_tasks=concurrent_tasks, exports=exports,
                  hazard_calculation_id=hc_id)
    return rcalc
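
A minimal sketch of how run2 might be driven, mirroring the call made inside run() in Example #2; the ini file names are placeholders and performance.Monitor is the monitor class used elsewhere in these snippets.

monitor = performance.Monitor('total', measuremem=True)
rcalc = run2('job_hazard.ini', 'job_risk.ini',   # hypothetical ini files
             concurrent_tasks=4, exports='csv', monitor=monitor)
print(rcalc.datastore.calc_dir)  # directory holding the risk results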
Example #2
def run(job_ini, concurrent_tasks=None,
        loglevel='info', hc=None, exports=''):
    """
    Run a calculation. Optionally, set the number of concurrent_tasks
    (0 to disable the parallelization).
    """
    logging.basicConfig(level=getattr(logging, loglevel.upper()))
    job_inis = job_ini.split(',')
    assert len(job_inis) in (1, 2), job_inis
    monitor = performance.Monitor('total', measuremem=True)

    if len(job_inis) == 1:  # run hazard or risk
        oqparam = readinput.get_oqparam(job_inis[0], hc_id=hc)
        if hc and hc < 0:  # interpret negative calculation ids
            calc_ids = datastore.get_calc_ids()
            try:
                hc = calc_ids[hc]
            except IndexError:
                raise SystemExit('There are %d old calculations, cannot '
                                 'retrieve the %s' % (len(calc_ids), hc))
        calc = base.calculators(oqparam, monitor)
        monitor.monitor_dir = calc.datastore.calc_dir
        with monitor:
            calc.run(concurrent_tasks=concurrent_tasks, exports=exports,
                     hazard_calculation_id=hc)
    else:  # run hazard + risk
        calc = run2(
            job_inis[0], job_inis[1], concurrent_tasks, exports, monitor)

    logging.info('Total time spent: %s s', monitor.duration)
    logging.info('Memory allocated: %s', general.humansize(monitor.mem))
    monitor.flush()
    print('See the output with hdfview %s/output.hdf5' %
          calc.datastore.calc_dir)
    return calc
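
Hypothetical invocations of this run(): a comma-separated pair of ini files triggers the hazard + risk chain via run2, while a negative hc reuses a previous hazard calculation, as handled in the code above. The file names are placeholders.

run('job_hazard.ini,job_risk.ini', concurrent_tasks=8, exports='csv')  # hazard, then risk
run('job_risk.ini', hc=-1)  # reuse the most recent hazard calculation in the datastore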
Example #3
def build_report(job_ini, output_dir=None):
    """
    Write a `report.rst` file with information about the calculation.

    :param job_ini:
        full pathname of the job.ini file
    :param output_dir:
        the directory where the report is written (defaults to the input directory)
    """
    oq = readinput.get_oqparam(job_ini)
    output_dir = output_dir or os.path.dirname(job_ini)
    calc = base.calculators(oq)
    calc.pre_execute()
    ds = datastore.DataStore(calc.datastore.calc_id)
    rw = ReportWriter(ds)
    report = os.path.join(output_dir, 'report.rst')
    for name in ('params', 'inputs'):
        rw.add(name)
    if 'scenario' not in oq.calculation_mode:
        rw.add('csm_info')
    rw.add('rlzs_assoc', calc.rlzs_assoc)
    if 'num_ruptures' in ds:
        rw.add('rupture_collections')
        rw.add('col_rlz_assocs')
    if oq.calculation_mode in ('classical', 'event_based', 'ebr'):
        rw.add('data_transfer')
    rw.save(report)
    return report
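
A hypothetical call; by default the report lands next to the job.ini, here an explicit output_dir is passed instead. Both paths are placeholders.

report_path = build_report('/path/to/job.ini', output_dir='/tmp')  # placeholder paths
print('Report written to %s' % report_path)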
Example #4
def get_calc(self, testfile, job_ini, **kw):
    """
    Return the calculator for the given test file and job.ini
    """
    self.testdir = os.path.dirname(testfile) if os.path.isfile(testfile) \
        else testfile
    inis = [os.path.join(self.testdir, ini) for ini in job_ini.split(',')]
    params = readinput.get_params(inis)
    params.update(kw)
    oq = oqvalidation.OqParam(**params)
    oq.validate()
    # change this when debugging the test
    monitor = Monitor(self.testdir)
    return base.calculators(oq, monitor)
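
get_calc is written as a method of a test class, so here is a standalone sketch of the same flow, assuming the same imports used above (os, readinput, oqvalidation, Monitor, base); the test directory and job file are placeholders.

testdir = '/path/to/testdir'                                        # placeholder
params = readinput.get_params([os.path.join(testdir, 'job.ini')])   # placeholder fixture
oq = oqvalidation.OqParam(**params)
oq.validate()
calc = base.calculators(oq, Monitor(testdir))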
Example #5
def run_calc(job, log_level, log_file, exports, lite=False):
    """
    Run a calculation.

    :param job:
        :class:`openquake.engine.db.model.OqJob` instance
    :param str log_level:
        The desired logging level. Valid choices are 'debug', 'info',
        'progress', 'warn', 'error', and 'critical'.
    :param str log_file:
        Complete path (including file name) to file where logs will be written.
        If `None`, logging will just be printed to standard output.
    :param exports:
        A comma-separated string of export types.
    :param lite:
        Flag set when the oq-lite calculators are used
    """
    # let's import the calculator classes here, when they are needed;
    # the reason is that the command `$ oq-engine --upgrade-db`
    # does not need them and would raise strange errors during installation
    # time if the PYTHONPATH is not set and commonlib is not visible
    if lite:
        calc_dir = os.path.join(datastore.DATADIR, 'calc_%d' % job.id)
        if os.path.exists(calc_dir):
            os.rename(calc_dir, calc_dir + '.bak')
            print('Generated %s.bak' % calc_dir)
        from openquake.commonlib.calculators import base
        calculator = base.calculators(job.get_oqparam(), calc_id=job.id)
        calculator.job = job
        calculator.monitor = EnginePerformanceMonitor('', job.id)
    else:
        from openquake.engine.calculators import calculators
        calculator = calculators(job)

    # first of all check the database version and exit if the db is outdated
    upgrader.check_versions(django_db.connections['admin'])
    with logs.handle(job, log_level, log_file), job_stats(job):  # run the job
        try:
            _do_run_calc(calculator, exports)
        except:
            tb = traceback.format_exc()
            logs.LOG.critical(tb)
            raise
    return calculator
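
A hedged driver sketch: `job` is assumed to be an OqJob instance already created by the engine (its construction is outside this snippet), and the export string follows the comma-separated convention described in the docstring.

calculator = run_calc(job, log_level='info', log_file=None,
                      exports='csv,xml', lite=True)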
Example #6
def run_calc(job, log_level, log_file, exports, lite=False):
    """
    Run a calculation.

    :param job:
        :class:`openquake.engine.db.model.OqJob` instance
    :param str log_level:
        The desired logging level. Valid choices are 'debug', 'info',
        'progress', 'warn', 'error', and 'critical'.
    :param str log_file:
        Complete path (including file name) to file where logs will be written.
        If `None`, logging will just be printed to standard output.
    :param exports:
        A comma-separated string of export types.
    :param lite:
        Flag set when the oq-lite calculators are used
    """
    # let's import the calculator classes here, when they are needed
    # the reason is that the command `$ oq-engine --upgrade-db`
    # does not need them and would raise strange errors during installation
    # time if the PYTHONPATH is not set and commonlib is not visible
    if lite:
        from openquake.commonlib.calculators import base
        calculator = base.calculators(job.get_oqparam())
        calculator.job = job
        calculator.monitor = EnginePerformanceMonitor('', job.id)
    else:
        from openquake.engine.calculators import calculators
        calculator = calculators(job)

    # first of all check the database version and exit if the db is outdated
    upgrader.check_versions(django_db.connections['admin'])
    with logs.handle(job, log_level, log_file), job_stats(job):  # run the job
        try:
            _do_run_calc(calculator, exports)
        except:
            tb = traceback.format_exc()
            logs.LOG.critical(tb)
            raise
    return calculator
Example #7
def run(job_ini, concurrent_tasks=None,
        loglevel='info', hc=None, exports=''):
    """
    Run a calculation. Optionally, set the number of concurrent_tasks
    (0 to disable the parallelization).
    """
    logging.basicConfig(level=getattr(logging, loglevel.upper()))
    oqparam = readinput.get_oqparam(job_ini)
    if concurrent_tasks is not None:
        oqparam.concurrent_tasks = concurrent_tasks
    oqparam.hazard_calculation_id = hc
    oqparam.exports = exports
    monitor = performance.Monitor('total', measuremem=True)
    calc = base.calculators(oqparam, monitor)
    monitor.monitor_dir = calc.datastore.calc_dir
    logging.info('Started job with output in %s', calc.datastore.calc_dir)
    with monitor:
        calc.run()
    logging.info('See the output with hdfview %s/output.hdf5',
                 calc.datastore.calc_dir)
    logging.info('Total time spent: %s s', monitor.duration)
    logging.info('Memory allocated: %s', general.humansize(monitor.mem))
    monitor.flush()
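
A hypothetical call to this variant, which stores concurrent_tasks, hc and exports directly on oqparam before running; the ini file name is a placeholder.

run('job.ini', concurrent_tasks=4, loglevel='debug', exports='csv')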
Example #8
def info(name, filtersources=False, weightsources=False, datatransfer=False):
    """
    Give information. You can pass the name of an available calculator,
    a job.ini file, or a zip archive with the input files.
    """
    logging.basicConfig(level=logging.INFO)
    with Monitor('info', measuremem=True) as mon:
        if datatransfer:
            oqparam = readinput.get_oqparam(name)
            calc = base.calculators(oqparam)
            calc.pre_execute()
            n_tasks, to_send_forward, to_send_back = data_transfer(calc)
            _print_info(calc.rlzs_assoc, oqparam,
                        calc.composite_source_model, calc.sitecol,
                        weightsources=True)
            print('Number of tasks to be generated: %d' % n_tasks)
            print('Estimated data to be sent forward: %s' %
                  humansize(to_send_forward))
            print('Estimated data to be sent back: %s' %
                  humansize(to_send_back))
        else:
            _info(name, filtersources, weightsources)
    if mon.duration > 1:
        print(mon)
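
Hypothetical calls following the docstring: info accepts either the name of an available calculator or a job.ini file, optionally estimating the data transfer. The calculator name and file name below are illustrative placeholders.

info('classical')                   # describe a calculator by name
info('job.ini', datatransfer=True)  # estimate the data transfer of a job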