Example #1
0
def checksum(thing):
    """
    Get the checksum of a calculation from the calculation ID (if already
    done) or from the job.ini/job.zip file (if not done yet). If `thing`
    is a source model logic tree file, get the checksum of the model by
    ignoring the job.ini, the gmpe logic tree file and possibly other files.

    :param thing: a calculation ID (as an integer string) or a path to a
        job.ini/job.zip/.xml file
    """
    try:
        job_id = int(thing)
        job_file = None
    except ValueError:
        # not an integer: assume it is a path to a file
        job_id = None
        job_file = thing
        if not os.path.exists(job_file):
            sys.exit('%s does not correspond to an existing file' % job_file)
    # NB: compare against None, not truthiness: a falsy job ID (0) must
    # still take the datastore branch, otherwise `job_file.endswith` would
    # be called with job_file=None and raise AttributeError
    if job_id is not None:
        dstore = util.read(job_id)
        checksum = dstore['/'].attrs['checksum32']
    elif job_file.endswith('.xml'):  # assume it is a smlt file
        inputs = {'source_model_logic_tree': job_file}
        checksum = readinput.get_checksum32(mock.Mock(inputs=inputs))
    else:
        oq = readinput.get_oqparam(job_file)
        checksum = readinput.get_checksum32(oq)
    print(checksum)
Example #2
0
def main(thing):
    """
    Get the checksum of a calculation from the calculation ID (if already
    done) or from the job.ini/job.zip file (if not done yet). If `thing`
    is a source model logic tree file, get the checksum of the model by
    ignoring the job.ini, the gmpe logic tree file and possibly other files.

    :param thing: a calculation ID (as an integer string) or a path to a
        job.ini/job.zip/.xml file
    """
    try:
        job_id = int(thing)
        job_file = None
    except ValueError:
        # not an integer: assume it is a path to a file
        job_id = None
        job_file = thing
        if not os.path.exists(job_file):
            sys.exit('%s does not correspond to an existing file' % job_file)
    # NB: compare against None, not truthiness: a falsy job ID (0) must
    # still take the datastore branch, otherwise `job_file.endswith` would
    # be called with job_file=None and raise AttributeError
    if job_id is not None:
        dstore = util.read(job_id)
        checksum = dstore['/'].attrs['checksum32']
    elif job_file.endswith('.xml'):  # assume it is a smlt file
        inputs = {'source_model_logic_tree': job_file}
        checksum = readinput.get_checksum32(mock.Mock(inputs=inputs))
    else:
        oq = readinput.get_oqparam(job_file)
        checksum = readinput.get_checksum32(oq)
    print(checksum)
Example #3
0
def zip_source_model(ssmLT, archive_zip='', log=logging.info):
    """
    Zip the source model files starting from the ssmLT.xml file.

    :param ssmLT: path to the source model logic tree file
    :param archive_zip: path of the zip archive to create
        (default: ssmLT.zip in the same directory as `ssmLT`)
    :param log: logging function used when zipping the files
    :returns: the path of the generated zip archive
    """
    basedir = os.path.dirname(ssmLT)
    if os.path.basename(ssmLT) != 'ssmLT.xml':
        # normalize the file name by copying it to basedir/ssmLT.xml
        orig = ssmLT
        ssmLT = os.path.join(basedir, 'ssmLT.xml')
        # copy via context managers so both handles are always closed,
        # even if an I/O error occurs (the original left `orig` open)
        with open(orig, 'rb') as src, open(ssmLT, 'wb') as dst:
            dst.write(src.read())

    archive_zip = archive_zip or os.path.join(basedir, 'ssmLT.zip')
    if os.path.exists(archive_zip):
        sys.exit('%s exists already' % archive_zip)
    smlt = logictree.SourceModelLogicTree(ssmLT)
    files = list(smlt.hdf5_files) + smlt.info.smpaths
    # mock an OqParam-like object with just the attributes read by
    # get_checksum32
    oq = mock.Mock(inputs={'source_model_logic_tree': ssmLT},
                   random_seed=42,
                   number_of_logic_tree_samples=0,
                   sampling_method='early_weights')
    checksum = readinput.get_checksum32(oq)
    checkfile = os.path.join(basedir, 'CHECKSUM.txt')
    with open(checkfile, 'w') as f:
        f.write(str(checksum))
    files.extend([os.path.abspath(ssmLT), os.path.abspath(checkfile)])
    general.zipfiles(files, archive_zip, log=log, cleanup=True)
    return archive_zip
Example #4
0
def smart_run(job_ini, oqparam, log_level, log_file, exports, reuse_hazard):
    """
    Run calculations by storing their hazard checksum and reusing previous
    calculations if requested.
    """
    haz_checksum = readinput.get_checksum32(oqparam, hazard=True)
    # look for a previous calculation with the same hazard checksum
    previous = logs.dbcmd('get_job_from_checksum', haz_checksum)
    can_reuse = (reuse_hazard and previous and
                 os.path.exists(previous.ds_calc_dir + '.hdf5'))
    # event based risk without precomputed gmfs: the hazard part must be
    # computed first, as an event_based calculation
    is_ebr = (oqparam.calculation_mode == 'event_based_risk' and
              'gmfs' not in oqparam.inputs)
    hazard_kw = {}
    if is_ebr:
        hazard_kw['calculation_mode'] = 'event_based'
        if (oqparam.sites or 'sites' in oqparam.inputs or
                'site_model' in oqparam.inputs):
            # remove exposure from the hazard
            hazard_kw['exposure_file'] = ''
    if can_reuse:
        hc_id = previous.id
        logging.info('Reusing job #%d', previous.id)
        run_job(job_ini, log_level, log_file,
                exports, hazard_calculation_id=hc_id)
    else:
        # recompute the hazard and store the checksum
        hc_id = run_job(job_ini, log_level, log_file, exports, **hazard_kw)
        if previous is None:
            logs.dbcmd('add_checksum', hc_id, haz_checksum)
        elif not reuse_hazard or not os.path.exists(
                previous.ds_calc_dir + '.hdf5'):
            logs.dbcmd('update_job_checksum', hc_id, haz_checksum)
        if is_ebr:
            run_job(job_ini, log_level, log_file,
                    exports, hazard_calculation_id=hc_id)
Example #5
0
def smart_run(job_ini, oqparam, log_level, log_file, exports, reuse_hazard,
              **params):
    """
    Run calculations by storing their hazard checksum and reusing previous
    calculations if requested.
    """
    haz_checksum = readinput.get_checksum32(oqparam, hazard=True)
    # look for a previous calculation with the same hazard checksum
    previous = logs.dbcmd('get_job_from_checksum', haz_checksum)
    if (reuse_hazard and previous and
            os.path.exists(previous.ds_calc_dir + '.hdf5')):
        # reuse the existing hazard calculation
        hc_id = previous.id
        logging.info('Reusing job #%d', previous.id)
        run_job(job_ini,
                log_level,
                log_file,
                exports,
                hazard_calculation_id=hc_id,
                **params)
    else:
        # recompute the hazard and store the checksum
        hc_id = run_job(job_ini, log_level, log_file, exports, **params)
        if previous is None:
            logs.dbcmd('add_checksum', hc_id, haz_checksum)
        elif not reuse_hazard or not os.path.exists(
                previous.ds_calc_dir + '.hdf5'):
            logs.dbcmd('update_job_checksum', hc_id, haz_checksum)
Example #6
0
 def __init__(self, job_ini, oqparam, log_level, log_file, exports,
              reuse_hazard):
     """
     Store the calculation parameters and run (or reuse) the hazard part
     of the calculation; every branch sets `self.hc_id` to the hazard
     calculation ID to be used by the risk part.
     """
     self.job_ini = job_ini
     self.oqparam = oqparam
     self.log_level = log_level
     self.log_file = log_file
     self.exports = exports
     checksum = readinput.get_checksum32(oqparam, hazard=True)
     # retrieve an old calculation with the right checksum, if any
     job = logs.dbcmd('get_job_from_checksum', checksum)
     # parameters overriding the job.ini when running the hazard part
     kw = dict(calculation_mode='event_based')
     if (oqparam.sites or 'sites' in oqparam.inputs or
             'site_model' in oqparam.inputs):
         # remove exposure from the hazard
         kw['exposure_file'] = ''
     if job is None:
         # recompute the hazard and store the checksum
         self.hc_id = run_job(job_ini, log_level, log_file, exports, **kw)
         logs.dbcmd('add_checksum', self.hc_id, checksum)
     elif not reuse_hazard or not os.path.exists(job.ds_calc_dir + '.hdf5'):
         # recompute and update the job associated to the checksum
         self.hc_id = run_job(job_ini, log_level, log_file, exports, **kw)
         logs.dbcmd('update_job_checksum', self.hc_id, checksum)
     else:
         # sanity check: the reused job must describe the same calculation
         assert job.description == oqparam.description, (
             job.description, oqparam.description)
         self.hc_id = job.id
         logging.info('Reusing job #%d', job.id)
Example #7
0
def zip_source_model(ssmLT, archive_zip='', log=logging.info):
    """
    Zip the source model files starting from the ssmLT.xml file.

    :param ssmLT: path to the source model logic tree file
    :param archive_zip: path of the zip archive to create
        (default: ssmLT.zip in the same directory as `ssmLT`)
    :param log: logging function used when zipping the files
    :returns: the path of the generated zip archive
    """
    basedir = os.path.dirname(ssmLT)
    if os.path.basename(ssmLT) != 'ssmLT.xml':
        # normalize the file name by copying it to basedir/ssmLT.xml
        orig = ssmLT
        ssmLT = os.path.join(basedir, 'ssmLT.xml')
        # copy via context managers so both handles are always closed,
        # even if an I/O error occurs (the original left `orig` open)
        with open(orig, 'rb') as src, open(ssmLT, 'wb') as dst:
            dst.write(src.read())

    archive_zip = archive_zip or os.path.join(basedir, 'ssmLT.zip')
    if os.path.exists(archive_zip):
        sys.exit('%s exists already' % archive_zip)
    # mock an OqParam-like object with just the `inputs` attribute read
    # by get_checksum32
    oq = mock.Mock(inputs={'source_model_logic_tree': ssmLT})
    checksum = readinput.get_checksum32(oq)
    checkfile = os.path.join(basedir, 'CHECKSUM.txt')
    with open(checkfile, 'w') as f:
        f.write(str(checksum))
    files = logictree.collect_info(ssmLT).smpaths + [
        os.path.abspath(ssmLT),
        os.path.abspath(checkfile)
    ]
    general.zipfiles(files, archive_zip, log=log, cleanup=True)
    return archive_zip
Example #8
0
def smart_run(job_ini, oqparam, log_level, log_file, exports, reuse_hazard):
    """
    Run calculations by storing their hazard checksum and reusing previous
    calculations if requested.
    """
    haz_checksum = readinput.get_checksum32(oqparam, hazard=True)
    # is there an old calculation with the same hazard checksum?
    dbjob = logs.dbcmd('get_job_from_checksum', haz_checksum)
    reusable = (reuse_hazard and dbjob and
                os.path.exists(dbjob.ds_calc_dir + '.hdf5'))
    # event based risk without precomputed gmfs: the hazard part must be
    # run first, as an event_based calculation
    ebr_mode = (oqparam.calculation_mode == 'event_based_risk' and
                'gmfs' not in oqparam.inputs)
    overrides = {}
    if ebr_mode:
        overrides['calculation_mode'] = 'event_based'
        if (oqparam.sites or 'sites' in oqparam.inputs or
                'site_model' in oqparam.inputs):
            # remove exposure from the hazard
            overrides['exposure_file'] = ''
    if reusable:
        hc_id = dbjob.id
        logging.info('Reusing job #%d', dbjob.id)
        run_job(job_ini, log_level, log_file,
                exports, hazard_calculation_id=hc_id)
    else:
        # recompute the hazard and store its checksum
        hc_id = run_job(job_ini, log_level, log_file, exports, **overrides)
        if dbjob is None:
            logs.dbcmd('add_checksum', hc_id, haz_checksum)
        elif not reuse_hazard or not os.path.exists(
                dbjob.ds_calc_dir + '.hdf5'):
            logs.dbcmd('update_job_checksum', hc_id, haz_checksum)
        if ebr_mode:
            run_job(job_ini, log_level, log_file,
                    exports, hazard_calculation_id=hc_id)
Example #9
0
 def save_params(self, **kw):
     """
     Merge `kw` into the current calculation parameters, persist the
     updated oqparam in the datastore and record the engine version;
     the input checksum is computed only on the first save.
     """
     vars(self.oqparam).update(**kw)
     # persist the updated parameters
     self.datastore['oqparam'] = self.oqparam
     root_attrs = self.datastore['/'].attrs
     root_attrs['engine_version'] = engine_version
     if 'checksum32' not in root_attrs:
         # expensive: computed once and then kept for the whole calculation
         root_attrs['checksum32'] = readinput.get_checksum32(self.oqparam)
     self.datastore.flush()
Example #10
0
def smart_run(job_ini, oqparam, log_level, log_file, exports, reuse_hazard):
    """
    Run calculations by storing their hazard checksum and reusing previous
    calculations if requested.

    :param job_ini: path to the job configuration file, passed to run_job
    :param oqparam: calculation parameters object (read for
        calculation_mode, inputs, sites and aggregate_by)
    :param log_level: logging level passed to run_job
    :param log_file: log file passed to run_job
    :param exports: export formats passed to run_job
    :param reuse_hazard: if true, reuse a previous hazard calculation
        with the same checksum, when its datastore is still on disk
    """
    haz_checksum = readinput.get_checksum32(oqparam, hazard=True)
    # retrieve an old calculation with the right checksum, if any
    job = logs.dbcmd('get_job_from_checksum', haz_checksum)
    # reuse only if requested and the old datastore is still on disk
    reuse = reuse_hazard and job and os.path.exists(job.ds_calc_dir + '.hdf5')
    # recompute the hazard and store the checksum
    # event_based_risk without precomputed gmfs: the hazard part must be
    # computed first, with calculation_mode overridden to 'event_based'
    ebr = (oqparam.calculation_mode == 'event_based_risk'
           and 'gmfs' not in oqparam.inputs)
    if ebr:
        kw = dict(calculation_mode='event_based')
        if (oqparam.sites or 'sites' in oqparam.inputs
                or 'site_model' in oqparam.inputs):
            # remove exposure from the hazard
            kw['exposure_file'] = ''
    else:
        kw = {}
    if not reuse:
        hc_id = run_job(job_ini, log_level, log_file, exports, **kw)
        if job is None:
            # first time this checksum is seen: store it
            logs.dbcmd('add_checksum', hc_id, haz_checksum)
        elif not reuse_hazard or not os.path.exists(job.ds_calc_dir + '.hdf5'):
            # re-associate the known checksum to the new job
            logs.dbcmd('update_job_checksum', hc_id, haz_checksum)
        if ebr:
            # run the risk part on top of the freshly computed hazard
            job_id = run_job(job_ini,
                             log_level,
                             log_file,
                             exports,
                             hazard_calculation_id=hc_id)
        else:
            job_id = hc_id
    else:
        hc_id = job.id
        logging.info('Reusing job #%d', job.id)
        job_id = run_job(job_ini,
                         log_level,
                         log_file,
                         exports,
                         hazard_calculation_id=hc_id)
    if ebr and oqparam.aggregate_by:
        # export the aggregated risk outputs of the final job
        logging.info('Exporting aggregated data')
        dstore = util.read(job_id)
        aggby = 'aggregate_by/%s/' % ','.join(oqparam.aggregate_by)
        fnames = []
        fnames.extend(export((aggby + 'avg_losses', 'csv'), dstore))
        fnames.extend(export((aggby + 'curves', 'csv'), dstore))
        for fname in fnames:
            logging.info('Exported %s', fname)
Example #11
0
 def save_params(self, **kw):
     """
     Merge `kw` into the current calculation parameters and save the
     engine version and date in the datastore attributes; the input
     checksum is computed only on the first save. A `hazard_calculation_id`
     equal to None is discarded rather than stored.
     """
     if kw.get('hazard_calculation_id', 0) is None:
         # present but None: do not override oqparam with a missing ID
         del kw['hazard_calculation_id']
     vars(self.oqparam).update(**kw)
     # persist the updated parameters
     self.datastore['oqparam'] = self.oqparam
     root = self.datastore['/'].attrs
     root['engine_version'] = engine_version
     root['date'] = datetime.now().isoformat()[:19]
     if 'checksum32' not in root:
         # expensive: computed once and then kept for the whole calculation
         root['checksum32'] = readinput.get_checksum32(self.oqparam)
     self.datastore.flush()
Example #12
0
 def save_params(self, **kw):
     """
     Update the current calculation parameters and save engine_version

     :param kw: parameters merged into `self.oqparam`; a None
         `hazard_calculation_id` is discarded instead of stored
     """
     if ('hazard_calculation_id' in kw and
             kw['hazard_calculation_id'] is None):
         # do not override oqparam with a missing hazard calculation ID
         del kw['hazard_calculation_id']
     vars(self.oqparam).update(**kw)
     self.datastore['oqparam'] = self.oqparam  # save the updated oqparam
     attrs = self.datastore['/'].attrs
     attrs['engine_version'] = engine_version
     # ISO date truncated to seconds (YYYY-MM-DDTHH:MM:SS)
     attrs['date'] = datetime.now().isoformat()[:19]
     if 'checksum32' not in attrs:
         # the checksum is computed only once, at the first save
         attrs['checksum32'] = readinput.get_checksum32(self.oqparam)
     self.datastore.flush()
Example #13
0
def checksum(job_file_or_job_id):
    """
    Get the checksum of a calculation from the calculation ID (if already
    done) or from the job.ini/job.zip file (if not done yet).

    :param job_file_or_job_id: a calculation ID (as an integer string)
        or a path to a job.ini/job.zip file
    """
    try:
        job_id = int(job_file_or_job_id)
        job_file = None
    except ValueError:
        # not an integer: assume it is a path to a file
        job_id = None
        job_file = job_file_or_job_id
        if not os.path.exists(job_file):
            sys.exit('%s does not correspond to an existing file' % job_file)
    # NB: compare against None, not truthiness: a falsy job ID (0) must
    # still take the datastore branch, otherwise get_oqparam would be
    # called with job_file=None
    if job_id is not None:
        dstore = datastore.read(job_id)
        checksum = dstore['/'].attrs['checksum32']
    else:
        oq = readinput.get_oqparam(job_file)
        checksum = readinput.get_checksum32(oq)
    print(checksum)
Example #14
0
def zip_source_model(ssmLT, archive_zip='', log=logging.info):
    """
    Zip the source model files starting from the ssmLT.xml file.

    :param ssmLT: path to the source model logic tree file
    :param archive_zip: path of the zip archive to create
        (default: ssmLT.zip in the same directory as `ssmLT`)
    :param log: logging function used when zipping the files
    :returns: the path of the generated zip archive
    """
    basedir = os.path.dirname(ssmLT)
    if os.path.basename(ssmLT) != 'ssmLT.xml':
        # normalize the file name by copying it to basedir/ssmLT.xml
        orig = ssmLT
        ssmLT = os.path.join(basedir, 'ssmLT.xml')
        # copy via context managers so both handles are always closed,
        # even if an I/O error occurs (the original left `orig` open)
        with open(orig, 'rb') as src, open(ssmLT, 'wb') as dst:
            dst.write(src.read())

    archive_zip = archive_zip or os.path.join(basedir, 'ssmLT.zip')
    if os.path.exists(archive_zip):
        sys.exit('%s exists already' % archive_zip)
    # mock an OqParam-like object with just the `inputs` attribute read
    # by get_checksum32
    oq = mock.Mock(inputs={'source_model_logic_tree': ssmLT})
    checksum = readinput.get_checksum32(oq)
    checkfile = os.path.join(basedir, 'CHECKSUM.txt')
    with open(checkfile, 'w') as f:
        f.write(str(checksum))
    files = [os.path.abspath(ssmLT), os.path.abspath(checkfile)]
    for fs in logictree.collect_info(ssmLT).smpaths.values():
        files.extend(fs)
    general.zipfiles(files, archive_zip, log=log, cleanup=True)
    return archive_zip