Example no. 1
def zip(what, archive_zip='', risk_file=''):
    logging.basicConfig(level=logging.INFO)
    if os.path.isdir(what):
        oqzip.zip_all(what)
    elif what.endswith('.xml') and '<logicTree' in open(what).read(512):
        # hack to see if the NRML file is of kind logicTree
        oqzip.zip_source_model(what, archive_zip)
    elif what.endswith('.xml') and '<exposureModel' in open(what).read(512):
        # hack to see if the NRML file is of kind exposureModel
        oqzip.zip_exposure(what, archive_zip)
    elif what.endswith('.ini'):  # a job.ini
        oqzip.zip_job(what, archive_zip, risk_file)
    else:
        sys.exit('Cannot zip %s' % what)
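A short usage sketch of the dispatcher above (paths are illustrative; oqzip is the engine module that does the actual archiving, while os, sys and logging come from the standard library):

# zip every input file found under a directory
zip('calc_files/')
# zip a single job configuration together with the files it references
zip('job_hazard.ini', archive_zip='job_hazard.zip')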
Example no. 2
def zip(what, archive_zip='', risk_file=''):
    """
    Zip into an archive one or two job.ini files with all related files
    """
    if os.path.isdir(what):
        oqzip.zip_all(what)
    elif what.endswith('.xml') and '<logicTree' in open(what).read(512):
        # hack to see if the NRML file is of kind logicTree
        oqzip.zip_source_model(what, archive_zip)
    elif what.endswith('.xml') and '<exposureModel' in open(what).read(512):
        # hack to see if the NRML file is of kind exposureModel
        oqzip.zip_exposure(what, archive_zip)
    elif what.endswith('.ini'):  # a job.ini
        oqzip.zip_job(what, archive_zip, risk_file)
    else:
        sys.exit('Cannot zip %s' % what)
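The open(what).read(512) sniff classifies an NRML file by its first 512 bytes, but it leaves closing the file handle to the garbage collector, which is why the comments call it a hack. A minimal sketch of the same check with a deterministic close (the helper name nrml_kind is illustrative, not part of the engine):

def nrml_kind(path, nbytes=512):
    # read only the head of the file and close it immediately
    with open(path) as f:
        head = f.read(nbytes)
    for tag in ('logicTree', 'exposureModel'):
        if '<' + tag in head:
            return tag
    return None

With such a helper the two .xml branches above reduce to a single lookup, e.g. nrml_kind('ssmLT.xml') returning 'logicTree'.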
Example no. 3
def main(zipfile):
    """Post a zipfile to the WebUI"""
    sess = requests.Session()
    if config.webapi.username:
        login_url = '%s/accounts/ajax_login/' % config.webapi.server
        logging.info('POST %s', login_url)
        resp = sess.post(login_url,
                         data=dict(username=config.webapi.username,
                                   password=config.webapi.password))
        if resp.status_code != 200:
            raise WebAPIError(resp.text)
    if zipfile.endswith('.ini'):  # not a zip file yet
        archive = zipfile[:-3] + 'zip'
        oqzip.zip_job(zipfile, archive)
        zipfile = archive
    resp = sess.post("%s/v1/calc/run" % config.webapi.server, {},
                     files=dict(archive=open(zipfile, 'rb')))
    print(json.loads(resp.text))
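The function builds a single requests.Session so that the cookie set by the ajax_login endpoint is reused by the upload request. The same pattern in isolation, with the server URL and the credentials passed explicitly (post_archive is an illustrative name, not an engine API):

import requests

def post_archive(server, archive_path, username='', password=''):
    sess = requests.Session()  # one session carries the auth cookie over
    if username:
        resp = sess.post('%s/accounts/ajax_login/' % server,
                         data=dict(username=username, password=password))
        resp.raise_for_status()  # fail early on a bad login
    with open(archive_path, 'rb') as f:  # close the archive after the upload
        resp = sess.post('%s/v1/calc/run' % server,
                         files=dict(archive=f))
    return resp.json()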
Example no. 4
def run_calc(job_id, oqparam, exports, hazard_calculation_id=None, **kw):
    """
    Run a calculation.

    :param job_id:
        ID of the current job
    :param oqparam:
        :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param exports:
        A comma-separated string of export types.
    """
    register_signals()
    setproctitle('oq-job-%d' % job_id)
    calc = base.calculators(oqparam, calc_id=job_id)
    logging.info('%s running %s [--hc=%s]',
                 getpass.getuser(),
                 calc.oqparam.inputs['job_ini'],
                 calc.oqparam.hazard_calculation_id)
    logging.info('Using engine version %s', __version__)
    msg = check_obsolete_version(oqparam.calculation_mode)
    if msg:
        logs.LOG.warning(msg)  # Logger.warn is a deprecated alias
    calc.from_engine = True
    tb = 'None\n'
    try:
        if not oqparam.hazard_calculation_id:
            if 'input_zip' in oqparam.inputs:  # starting from an archive
                with open(oqparam.inputs['input_zip'], 'rb') as arch:
                    data = numpy.array(arch.read())
            else:
                logs.LOG.info('Zipping the input files')
                bio = io.BytesIO()
                oqzip.zip_job(oqparam.inputs['job_ini'], bio, (), oqparam,
                              logging.debug)
                data = numpy.array(bio.getvalue())
                del bio
            calc.datastore['input/zip'] = data
            calc.datastore.set_attrs('input/zip', nbytes=data.nbytes)
            del data  # save memory

        poll_queue(job_id, _PID, poll_time=15)
        if OQ_DISTRIBUTE.endswith('pool'):
            logs.LOG.warning('Using %d cores on %s',
                             parallel.cpu_count, platform.node())
        if OQ_DISTRIBUTE == 'zmq':
            logs.dbcmd('zmq_start')  # start zworkers
            logs.dbcmd('zmq_wait')  # wait for them to go up
        if OQ_DISTRIBUTE.startswith(('celery', 'zmq')):
            set_concurrent_tasks_default(job_id)
        t0 = time.time()
        calc.run(exports=exports,
                 hazard_calculation_id=hazard_calculation_id,
                 close=False, **kw)
        logs.LOG.info('Exposing the outputs to the database')
        expose_outputs(calc.datastore)
        duration = time.time() - t0
        records = views.performance_view(calc.datastore, add_calc_id=False)
        logs.dbcmd('save_performance', job_id, records)
        calc.datastore.close()
        logs.LOG.info('Calculation %d finished correctly in %d seconds',
                      job_id, duration)
        logs.dbcmd('finish', job_id, 'complete')
    except BaseException as exc:
        if isinstance(exc, MasterKilled):
            msg = 'aborted'
        else:
            msg = 'failed'
        tb = traceback.format_exc()
        try:
            logs.LOG.critical(tb)
            logs.dbcmd('finish', job_id, msg)
        except BaseException:  # an OperationalError may always happen
            sys.stderr.write(tb)
        raise
    finally:
        # if there was an error in the calculation, this part may fail;
        # in such a situation, we simply log the cleanup error without
        # taking further action, so that the real error can propagate
        if OQ_DISTRIBUTE == 'zmq':  # stop zworkers
            logs.dbcmd('zmq_stop')
        try:
            if OQ_DISTRIBUTE.startswith('celery'):
                celery_cleanup(TERMINATE)
        except BaseException:
            # log the finalization error only if there is no real error
            if tb == 'None\n':
                logs.LOG.error('finalizing', exc_info=True)
    return calc
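Note the tb = 'None\n' sentinel: it lets the finally block distinguish a clean run from a failed one, so a cleanup failure is reported only when it is the sole failure and cannot mask the primary traceback. The pattern in isolation (illustrative names, not engine code):

import logging
import traceback

def run_with_cleanup(work, cleanup):
    tb = 'None\n'  # sentinel: no primary error yet
    try:
        return work()
    except BaseException:
        tb = traceback.format_exc()  # remember the real error
        raise  # and let it propagate
    finally:
        try:
            cleanup()
        except BaseException:
            # log the cleanup failure only if nothing else went wrong,
            # so it can never hide the primary traceback
            if tb == 'None\n':
                logging.error('finalizing', exc_info=True)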
Example no. 5
def run_calc(job_id, oqparam, exports, hazard_calculation_id=None, **kw):
    """
    Run a calculation.

    :param job_id:
        ID of the current job
    :param oqparam:
        :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param exports:
        A comma-separated string of export types.
    """
    register_signals()
    setproctitle('oq-job-%d' % job_id)
    calc = base.calculators(oqparam, calc_id=job_id)
    logging.info('%s running %s [--hc=%s]',
                 getpass.getuser(),
                 calc.oqparam.inputs['job_ini'],
                 calc.oqparam.hazard_calculation_id)
    logging.info('Using engine version %s', __version__)
    msg = check_obsolete_version(oqparam.calculation_mode)
    if msg:
        logs.LOG.warning(msg)  # Logger.warn is a deprecated alias
    if OQ_DISTRIBUTE.startswith(('celery', 'zmq')):
        set_concurrent_tasks_default(job_id)
    calc.from_engine = True
    tb = 'None\n'
    try:
        if not oqparam.hazard_calculation_id:
            if 'input_zip' in oqparam.inputs:  # starting from an archive
                with open(oqparam.inputs['input_zip'], 'rb') as arch:
                    data = numpy.array(arch.read())
            else:
                logs.LOG.info('zipping the input files')
                bio = io.BytesIO()
                oqzip.zip_job(oqparam.inputs['job_ini'], bio, (), oqparam,
                              logging.debug)
                data = numpy.array(bio.getvalue())
                del bio
            calc.datastore['input/zip'] = data
            calc.datastore.set_attrs('input/zip', nbytes=data.nbytes)
            del data  # save memory

        poll_queue(job_id, _PID, poll_time=15)
        t0 = time.time()
        calc.run(exports=exports,
                 hazard_calculation_id=hazard_calculation_id,
                 close=False, **kw)
        logs.LOG.info('Exposing the outputs to the database')
        expose_outputs(calc.datastore)
        duration = time.time() - t0
        calc._monitor.flush()
        records = views.performance_view(calc.datastore)
        logs.dbcmd('save_performance', job_id, records)
        calc.datastore.close()
        logs.LOG.info('Calculation %d finished correctly in %d seconds',
                      job_id, duration)
        logs.dbcmd('finish', job_id, 'complete')
    except BaseException as exc:
        if isinstance(exc, MasterKilled):
            msg = 'aborted'
        else:
            msg = 'failed'
        tb = traceback.format_exc()
        try:
            logs.LOG.critical(tb)
            logs.dbcmd('finish', job_id, msg)
        except BaseException:  # an OperationalError may always happen
            sys.stderr.write(tb)
        raise
    finally:
        # if there was an error in the calculation, this part may fail;
        # in such a situation, we simply log the cleanup error without
        # taking further action, so that the real error can propagate
        try:
            if OQ_DISTRIBUTE.startswith('celery'):
                celery_cleanup(TERMINATE)
        except BaseException:
            # log the finalization error only if there is no real error
            if tb == 'None\n':
                logs.LOG.error('finalizing', exc_info=True)
    return calc
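Both versions classify a failure in the same way: a MasterKilled exception means the job was deliberately aborted, anything else means it failed; and if recording the status raises as well (the comment mentions OperationalError), the traceback falls back to stderr so it is never lost. A sketch of just that step, with the logger and the database command passed in as callables (record_failure is an illustrative name):

import sys
import traceback

class MasterKilled(BaseException):
    pass  # stand-in for the engine's MasterKilled exception

def record_failure(job_id, exc, log, dbcmd):
    # meant to be called from inside an except block, as in run_calc above
    msg = 'aborted' if isinstance(exc, MasterKilled) else 'failed'
    tb = traceback.format_exc()
    try:
        log.critical(tb)
        dbcmd('finish', job_id, msg)
    except BaseException:  # e.g. the database is unreachable
        sys.stderr.write(tb)
    return tb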