Example 1
def dump(archive, calc_id=0, user=None):
    """
    Dump the openquake database and all the complete calculations into a zip
    file. In a multiuser installation it must be run as administrator.
    """
    t0 = time.time()
    assert archive.endswith('.zip'), archive
    getfnames = 'select ds_calc_dir || ".hdf5" from job where ?A'
    param = dict(status='complete')
    if calc_id:
        param['id'] = calc_id
    if user:
        param['user_name'] = user
    fnames = [f for f, in db(getfnames, param) if os.path.exists(f)]
    zipfiles(fnames, archive, 'w', safeprint)
    pending_jobs = db('select id, status, description from job '
                      'where status="executing"')
    if pending_jobs:
        safeprint('WARNING: there were calculations executing during the '
                  'dump; they have not been copied')
        for job_id, status, descr in pending_jobs:
            safeprint('%d %s %s' % (job_id, status, descr))

    # this also checks that the copied db is not corrupted
    smart_save(db.path, archive, calc_id)

    dt = time.time() - t0
    safeprint('Archived %d calculations into %s in %d seconds' %
              (len(fnames), archive, dt))
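A minimal usage sketch (the paths and user name are hypothetical, and it assumes dump and its module-level helpers db, zipfiles and safeprint are importable from wherever the engine defines them):

dump('/tmp/oq-dump.zip')             # all complete calculations
dump('/tmp/oq-42.zip', calc_id=42)   # a single calculation
dump('/tmp/oq-bob.zip', user='bob')  # only calculations owned by user 'bob'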
Example 2
def zip_source_model(ssmLT, archive_zip='', log=logging.info):
    """
    Zip the source model files starting from the ssmLT.xml file
    """
    basedir = os.path.dirname(ssmLT)
    if os.path.basename(ssmLT) != 'ssmLT.xml':
        orig = ssmLT
        ssmLT = os.path.join(basedir, 'ssmLT.xml')
        with open(orig, 'rb') as src, open(ssmLT, 'wb') as f:
            f.write(src.read())

    archive_zip = archive_zip or os.path.join(basedir, 'ssmLT.zip')
    if os.path.exists(archive_zip):
        sys.exit('%s exists already' % archive_zip)
    smlt = logictree.SourceModelLogicTree(ssmLT)
    files = list(smlt.hdf5_files) + smlt.info.smpaths
    oq = mock.Mock(inputs={'source_model_logic_tree': ssmLT},
                   random_seed=42,
                   number_of_logic_tree_samples=0,
                   sampling_method='early_weights')
    checksum = readinput.get_checksum32(oq)
    checkfile = os.path.join(os.path.dirname(ssmLT), 'CHECKSUM.txt')
    with open(checkfile, 'w') as f:
        f.write(str(checksum))
    files.extend([os.path.abspath(ssmLT), os.path.abspath(checkfile)])
    general.zipfiles(files, archive_zip, log=log, cleanup=True)
    return archive_zip
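A usage sketch, assuming the given directory contains a valid ssmLT.xml logic tree (the path is illustrative):

# Produces ssmLT.zip next to the input, bundling the logic tree, the
# referenced source model files and a freshly written CHECKSUM.txt.
zip_source_model('/path/to/model/ssmLT.xml')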
Example 3
def export_from_db(output_key, calc_id, datadir, target):
    """
    :param output_key: a pair (ds_key, fmt)
    :param calc_id: calculation ID
    :param datadir: directory containing the datastore
    :param target: directory, temporary when called from the engine server
    """
    makedirs(target)
    export.from_db = True
    ds_key, fmt = output_key
    with datastore.read(calc_id, datadir=datadir) as dstore:
        dstore.export_dir = target
        try:
            exported = export(output_key, dstore)
        except Exception:
            etype, err, tb = sys.exc_info()
            tb_str = ''.join(traceback.format_tb(tb))
            version = check_version(dstore)
            raise DataStoreExportError('Could not export %s in %s%s\n%s%s' %
                                       (output_key + (version, tb_str, err)))
        if not exported:
            raise DataStoreExportError('Nothing to export for %s' % ds_key)
        elif len(exported) > 1:
            # NB: I am hiding the archive by starting its name with a '.',
            # to avoid confusing the users, since the unzipped files are
            # already in the target directory; the archive is used internally
            # by the WebUI, so it must be there; it would be nice not to
            # generate it when not using the Web UI, but I will leave that
            # feature for after the removal of the old calculators
            archname = '.' + ds_key + '-' + fmt + '.zip'
            general.zipfiles(exported, os.path.join(target, archname))
            return os.path.join(target, archname)
        else:  # single file
            return exported[0]
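A hedged usage sketch; the datastore key, format and paths below are illustration values, not guaranteed to exist in any given calculation:

# Export the 'hcurves' key of calculation 42 as CSV into a scratch dir;
# returns either the single exported file or a (hidden) zip archive.
path = export_from_db(('hcurves', 'csv'), 42,
                      datadir='/home/user/oqdata', target='/tmp/exports')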
Example 4
def zip(job_ini, archive_zip, risk_ini, oq=None, log=logging.info):
    """
    Zip the given job.ini file into the given archive, together with all
    related files.
    """
    if not os.path.exists(job_ini):
        sys.exit('%s does not exist' % job_ini)
    if isinstance(archive_zip, str):  # actually it should be path-like
        if not archive_zip.endswith('.zip'):
            sys.exit('%s does not end with .zip' % archive_zip)
        if os.path.exists(archive_zip):
            sys.exit('%s exists already' % archive_zip)
    logging.basicConfig(level=logging.INFO)
    # do not validate to avoid permissions error on the export_dir
    oq = oq or readinput.get_oqparam(job_ini, validate=False)
    files = set()
    if risk_ini:
        risk_ini = os.path.normpath(os.path.abspath(risk_ini))
        oq.inputs.update(readinput.get_params([risk_ini])['inputs'])
        files.add(os.path.normpath(os.path.abspath(job_ini)))

    # collect .hdf5 tables for the GSIMs, if any
    if 'gsim_logic_tree' in oq.inputs or oq.gsim:
        gsim_lt = readinput.get_gsim_lt(oq)
        for gsims in gsim_lt.values.values():
            for gsim in gsims:
                table = getattr(gsim, 'GMPE_TABLE', None)
                if table:
                    files.add(table)

    # collect exposure.csv, if any
    exposure_xml = oq.inputs.get('exposure')
    if exposure_xml:
        dname = os.path.dirname(exposure_xml)
        expo = nrml.read(exposure_xml, stop='asset')[0]
        if not expo.assets:
            exposure_csv = (~expo.assets).strip()
            for csv in exposure_csv.split():
                if csv and os.path.exists(os.path.join(dname, csv)):
                    files.add(os.path.join(dname, csv))

    # collect the .hdf5 UCERF file, if any
    if oq.calculation_mode.startswith('ucerf_'):
        sm = nrml.read(oq.inputs['source_model'])
        fname = sm.sourceModel.UCERFSource['filename']
        f = os.path.join(os.path.dirname(oq.inputs['source_model']), fname)
        files.add(os.path.normpath(f))

    # collect all other files
    for key in oq.inputs:
        fname = oq.inputs[key]
        if isinstance(fname, list):
            for f in fname:
                files.add(os.path.normpath(f))
        elif isinstance(fname, dict):
            for f in fname.values():
                files.add(os.path.normpath(f))
        else:
            files.add(os.path.normpath(fname))
    general.zipfiles(files, archive_zip, log=log)
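Note that inside its module this function shadows the built-in zip. A usage sketch with hypothetical file names:

# Bundle a hazard job.ini and a companion risk .ini with all their inputs
zip('job_hazard.ini', 'job.zip', risk_ini='job_risk.ini')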
Example 5
def zip_source_model(ssmLT, archive_zip='', log=logging.info):
    """
    Zip the source model files starting from the ssmLT.xml file
    """
    basedir = os.path.dirname(ssmLT)
    if os.path.basename(ssmLT) != 'ssmLT.xml':
        orig = ssmLT
        ssmLT = os.path.join(basedir, 'ssmLT.xml')
        with open(orig, 'rb') as src, open(ssmLT, 'wb') as f:
            f.write(src.read())

    archive_zip = archive_zip or os.path.join(basedir, 'ssmLT.zip')
    if os.path.exists(archive_zip):
        sys.exit('%s exists already' % archive_zip)
    oq = mock.Mock(inputs={'source_model_logic_tree': ssmLT})
    checksum = readinput.get_checksum32(oq)
    checkfile = os.path.join(os.path.dirname(ssmLT), 'CHECKSUM.txt')
    with open(checkfile, 'w') as f:
        f.write(str(checksum))
    files = logictree.collect_info(ssmLT).smpaths + [
        os.path.abspath(ssmLT),
        os.path.abspath(checkfile)
    ]
    general.zipfiles(files, archive_zip, log=log, cleanup=True)
    return archive_zip
Example 6
def zip(job_ini, archive_zip):
    """
    Zip the given job.ini file into the given archive, together with all
    related files.
    """
    if not os.path.exists(job_ini):
        sys.exit('%s does not exist' % job_ini)
    if not archive_zip.endswith('.zip'):
        sys.exit('%s does not end with .zip' % archive_zip)
    if os.path.exists(archive_zip):
        sys.exit('%s exists already' % archive_zip)
    logging.basicConfig(level=logging.INFO)
    oq = readinput.get_oqparam(job_ini)
    files = set()

    # collect .hdf5 tables for the GSIMs, if any
    if 'gsim_logic_tree' in oq.inputs or oq.gsim:
        gsim_lt = readinput.get_gsim_lt(oq)
        for gsims in gsim_lt.values.values():
            for gsim in gsims:
                table = getattr(gsim, 'GMPE_TABLE', None)
                if table:
                    files.add(table)

    # collect all other files
    for key in oq.inputs:
        fname = oq.inputs[key]
        if isinstance(fname, list):
            for f in fname:
                files.add(os.path.normpath(f))
        else:
            files.add(os.path.normpath(fname))
    general.zipfiles(files, archive_zip, log=logging.info)
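The same usage pattern applies to this simpler variant, which takes no risk file:

zip('job.ini', 'inputs.zip')  # hypothetical file names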
Example 7
def zip_exposure(exposure_xml, archive_zip='', log=logging.info):
    """
    Zip an exposure.xml file with all its .csv subfiles (if any)
    """
    archive_zip = archive_zip or exposure_xml[:-4] + '.zip'
    if os.path.exists(archive_zip):
        sys.exit('%s exists already' % archive_zip)
    [exp] = Exposure.read_headers([exposure_xml])
    files = [exposure_xml] + exp.datafiles
    general.zipfiles(files, archive_zip, log=log, cleanup=True)
    return archive_zip
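A usage sketch (the file name is hypothetical):

# Archives exposure.xml plus the .csv data files it references; with the
# default archive_zip it produces exposure.zip next to the input file.
zip_exposure('exposure.xml')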
Example 8
def smart_save(dbpath, archive):
    """
    Make a copy of the db, remove the incomplete jobs and add the copy
    to the archive
    """
    tmpdir = tempfile.mkdtemp()
    newdb = os.path.join(tmpdir, os.path.basename(dbpath))
    shutil.copy(dbpath, newdb)
    try:
        with sqlite3.connect(newdb) as conn:
            conn.execute('DELETE FROM job WHERE status != "complete"')
    except:
        safeprint('Please check the copy of the db in %s' % newdb)
        raise
    zipfiles([newdb], archive, 'a', safeprint)
    shutil.rmtree(tmpdir)
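A usage sketch; the database file name below is an assumption about where the engine keeps its SQLite db:

# Append a pruned copy of the db (complete jobs only) to an existing archive
smart_save('/home/user/oqdata/db.sqlite3', '/tmp/oq-dump.zip')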
Example 9
def zip_job(job_ini, archive_zip='', risk_ini='', oq=None, log=logging.info):
    """
    Zip the given job.ini file into the given archive, together with all
    related files.
    """
    if not os.path.exists(job_ini):
        sys.exit('%s does not exist' % job_ini)
    archive_zip = archive_zip or 'job.zip'
    if isinstance(archive_zip, str):  # actually it should be path-like
        if not archive_zip.endswith('.zip'):
            sys.exit('%s does not end with .zip' % archive_zip)
        if os.path.exists(archive_zip):
            sys.exit('%s exists already' % archive_zip)
    # do not validate to avoid permissions error on the export_dir
    oq = oq or oqvalidation.OqParam(**readinput.get_params(job_ini))
    if risk_ini:
        risk_ini = os.path.normpath(os.path.abspath(risk_ini))
        oqr = readinput.get_oqparam(risk_ini)
        del oqr.inputs['job_ini']
        oq.inputs.update(oqr.inputs)
        oq.shakemap_uri.update(oqr.shakemap_uri)
    files = readinput.get_input_files(oq)
    if risk_ini:
        files = [risk_ini] + files
    return general.zipfiles(files, archive_zip, log=log)
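A usage sketch with hypothetical file names:

# Zip a hazard job plus the risk job that extends it, with all input files
zip_job('job_hazard.ini', 'bundle.zip', risk_ini='job_risk.ini')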
Example 10
def smart_save(dbpath, archive, calc_id):
    """
    Make a copy of the db, remove the incomplete jobs and add the copy
    to the archive
    """
    tmpdir = tempfile.mkdtemp()
    newdb = os.path.join(tmpdir, os.path.basename(dbpath))
    shutil.copy(dbpath, newdb)
    try:
        with sqlite3.connect(newdb) as conn:
            conn.execute('DELETE FROM job WHERE status != "complete"')
            if calc_id:
                conn.execute('DELETE FROM job WHERE id != %d' % calc_id)
    except:
        safeprint('Please check the copy of the db in %s' % newdb)
        raise
    zipfiles([newdb], archive, 'a', safeprint)
    shutil.rmtree(tmpdir)
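This variant additionally takes a calc_id; a usage sketch (hypothetical paths again):

# Keep only calculation 42 in the copied database before archiving it
smart_save('/home/user/oqdata/db.sqlite3', '/tmp/oq-42.zip', 42)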
Example 11
def zip_source_model(ssmLT, archive_zip='', log=logging.info):
    """
    Zip the source model files starting from the ssmLT.xml file
    """
    basedir = os.path.dirname(ssmLT)
    if os.path.basename(ssmLT) != 'ssmLT.xml':
        orig = ssmLT
        ssmLT = os.path.join(basedir, 'ssmLT.xml')
        with open(orig, 'rb') as src, open(ssmLT, 'wb') as f:
            f.write(src.read())

    archive_zip = archive_zip or os.path.join(basedir, 'ssmLT.zip')
    if os.path.exists(archive_zip):
        sys.exit('%s exists already' % archive_zip)
    oq = mock.Mock(inputs={'source_model_logic_tree': ssmLT})
    checksum = readinput.get_checksum32(oq)
    checkfile = os.path.join(os.path.dirname(ssmLT), 'CHECKSUM.txt')
    with open(checkfile, 'w') as f:
        f.write(str(checksum))
    files = [os.path.abspath(ssmLT), os.path.abspath(checkfile)]
    for fs in logictree.collect_info(ssmLT).smpaths.values():
        files.extend(fs)
    general.zipfiles(files, archive_zip, log=log, cleanup=True)
    return archive_zip
Example 12
def zip_job(job_ini, archive_zip='', risk_ini='', oq=None, log=logging.info):
    """
    Zip the given job.ini file into the given archive, together with all
    related files.
    """
    if not os.path.exists(job_ini):
        sys.exit('%s does not exist' % job_ini)
    archive_zip = archive_zip or 'job.zip'
    if isinstance(archive_zip, str):  # actually it should be path-like
        if not archive_zip.endswith('.zip'):
            sys.exit('%s does not end with .zip' % archive_zip)
        if os.path.exists(archive_zip):
            sys.exit('%s exists already' % archive_zip)
    # do not validate to avoid permissions error on the export_dir
    oq = oq or readinput.get_oqparam(job_ini, validate=False)
    if risk_ini:
        risk_ini = os.path.normpath(os.path.abspath(risk_ini))
        risk_inputs = readinput.get_params([risk_ini])['inputs']
        del risk_inputs['job_ini']
        oq.inputs.update(risk_inputs)
    files = readinput.get_input_files(oq)
    if risk_ini:
        files = [risk_ini] + files
    return general.zipfiles(files, archive_zip, log=log)
Example 13
def calc_result(request, result_id):
    """
    Download a specific result, by ``result_id``.

    The common abstracted functionality for getting hazard or risk results.

    :param request:
        `django.http.HttpRequest` object. Can contain an `export_type` GET
        param (the default is 'xml' if no param is specified).
    :param result_id:
        The id of the requested artifact.
    :returns:
        A 404 response if the requested ``result_id`` is not available in
        the format designated by the `export_type`.

        Otherwise, a `django.http.HttpResponse` containing the content
        of the requested artifact.

    Parameters for the GET request can include an `export_type`, such as 'xml',
    'geojson', 'csv', etc.
    """
    # If the result for the requested ID doesn't exist, or the job to
    # which it is related is not complete, throw back a 404.
    try:
        job_id, job_status, job_user, datadir, ds_key = logs.dbcmd(
            'get_result', result_id)
        if not utils.user_has_permission(request, job_user):
            return HttpResponseForbidden()
    except dbapi.NotFound:
        return HttpResponseNotFound()

    etype = request.GET.get('export_type')
    export_type = etype or DEFAULT_EXPORT_TYPE

    tmpdir = tempfile.mkdtemp()
    try:
        exported = core.export_from_db((ds_key, export_type), job_id, datadir,
                                       tmpdir)
    except DataStoreExportError as exc:
        # TODO: there should be a better error page
        return HttpResponse(content='%s: %s' % (exc.__class__.__name__, exc),
                            content_type='text/plain',
                            status=500)
    if not exported:
        # Throw back a 404 if the exact export parameters are not supported
        return HttpResponseNotFound(
            'Nothing to export for export_type=%s, %s' % (export_type, ds_key))
    elif len(exported) > 1:
        # Building an archive so that there can be a single file download
        archname = ds_key + '-' + export_type + '.zip'
        zipfiles(exported, os.path.join(tmpdir, archname))
        exported = os.path.join(tmpdir, archname)
    else:  # single file
        exported = exported[0]

    content_type = EXPORT_CONTENT_TYPE_MAP.get(export_type,
                                               DEFAULT_CONTENT_TYPE)

    fname = 'output-%s-%s' % (result_id, os.path.basename(exported))
    stream = FileWrapper(open(exported, 'rb'))  # 'b' is needed on Windows
    stream.close = lambda: (FileWrapper.close(stream), shutil.rmtree(tmpdir))
    response = FileResponse(stream, content_type=content_type)
    response['Content-Disposition'] = ('attachment; filename=%s' %
                                       os.path.basename(fname))
    response['Content-Length'] = str(os.path.getsize(exported))
    return response
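This is a Django view, so it is reached through URL routing rather than called directly. A sketch of hypothetical wiring (the URL pattern is an assumption, not the engine's actual routing):

# urls.py (illustrative only)
from django.urls import re_path

urlpatterns = [
    re_path(r'^v1/calc/result/(?P<result_id>\d+)$', calc_result),
]
# GET /v1/calc/result/123?export_type=csv would then stream the CSV export
# as an attachment and remove the temporary directory once the download ends.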