def plot(calc_id, other_id=None, sites='0'):
    """
    Hazard curves plotter.

    :param calc_id: calculation numeric ID
    :param other_id: ID of another calculation (optional)
    :param sites: comma-separated string with the site indices
    """
    # read the hazard data
    haz = datastore.DataStore(calc_id)
    other = datastore.DataStore(other_id) if other_id else None
    oq = oqvalidation.OqParam.from_(haz.attrs)
    indices = list(map(int, sites.split(',')))
    n_sites = len(haz['sites'])
    if not set(indices) <= set(range(n_sites)):
        invalid = sorted(set(indices) - set(range(n_sites)))
        print('The indices %s are invalid: no graph for them' % invalid)
    valid = sorted(set(range(n_sites)) & set(indices))
    print('Found %d site(s); plotting %d of them' % (n_sites, len(valid)))
    curves_by_rlz, mean_curves = combined_curves(haz)
    if other is None:
        # a single curve is plotted when there is only one realization or
        # when individual_curves is explicitly disabled
        single_curve = len(curves_by_rlz) == 1 or not getattr(
            oq, 'individual_curves', True)
        plt = make_figure(valid, oq.imtls, mean_curves,
                          {} if single_curve else curves_by_rlz, 'mean')
    else:
        # reuse the mean curves already computed for `haz` above instead
        # of calling combined_curves(haz) a second time
        _, mean2 = combined_curves(other)
        plt = make_figure(valid, oq.imtls, mean_curves,
                          {'mean': mean2}, 'reference')
    plt.show()
def show(calc_id, key=None, rlzs=None):
    """
    Show the content of a datastore.

    :param calc_id: numeric calculation ID; if 0, show all calculations
    :param key: key of the datastore
    :param rlzs: flag; if given, print out the realizations in order
    """
    if not calc_id:  # show all calculations
        if not os.path.exists(datastore.DATADIR):
            return
        rows = []
        for calc_id in datastore.get_calc_ids(datastore.DATADIR):
            try:
                oq = OqParam.from_(datastore.DataStore(calc_id).attrs)
                cmode, descr = oq.calculation_mode, oq.description
            except Exception:  # invalid datastore directory
                # narrowed from a bare `except:` so that KeyboardInterrupt
                # and SystemExit do not trigger the directory removal below
                logging.warn('Removed invalid calculation %d', calc_id)
                shutil.rmtree(
                    os.path.join(datastore.DATADIR, 'calc_%s' % calc_id))
            else:
                rows.append((calc_id, cmode, descr))
        for row in sorted(rows, key=lambda row: row[0]):  # by calc_id
            print('#%d %s: %s' % row)
        return
    ds = datastore.DataStore(calc_id)
    if key:
        if key in datastore.view:
            print(datastore.view(key, ds))
            return
        obj = ds[key]
        if hasattr(obj, 'value'):  # an array
            print(write_csv(io.StringIO(), obj.value))
        else:
            print(obj)
        return
    # print all keys
    oq = OqParam.from_(ds.attrs)
    print(oq.calculation_mode, 'calculation (%r) saved in %s contains:' %
          (oq.description, ds.calc_dir))
    for key in ds:
        print(key, humansize(ds.getsize(key)))
    # this part is experimental and not tested on purpose
    if rlzs and 'curves_by_trt_gsim' in ds:
        min_value = 0.01  # used in rmsep
        curves_by_rlz, mean_curves = combined_curves(ds)
        dists = []
        for rlz in sorted(curves_by_rlz):
            curves = curves_by_rlz[rlz]
            dist = sum(rmsep(mean_curves[imt], curves[imt], min_value)
                       for imt in mean_curves.dtype.fields)
            dists.append((dist, rlz))
        for dist, rlz in sorted(dists):
            print('rlz=%s, rmsep=%s' % (rlz, dist))
def pre_execute(self):
    """
    Check if there is a pre_calculator or a previous calculation ID.
    If yes, read the inputs by invoking the precalculator or by retrieving
    the previous calculation; if not, read the inputs directly.
    """
    if self.pre_calculator is not None:
        # the parameter hazard_calculation_id is only meaningful if
        # there is a precalculator
        precalc_id = self.oqparam.hazard_calculation_id
        if precalc_id is None:  # recompute everything
            precalc = calculators[self.pre_calculator](
                self.oqparam, self.monitor('precalculator'),
                self.datastore.calc_id)
            precalc.run(clean_up=False)
            if 'scenario' not in self.oqparam.calculation_mode:
                self.csm = precalc.csm
        else:  # read previously computed data
            self.datastore.parent = datastore.DataStore(precalc_id)
            # merge old oqparam into the new ones, when possible;
            # vars() returns the live __dict__, so adding keys here
            # updates self.oqparam in place (the original also had a
            # no-op `self.oqparam = self.oqparam`, removed)
            new = vars(self.oqparam)
            for name, value in self.datastore.parent['oqparam']:
                if name not in new:  # add missing parameter
                    new[name] = value
        self.read_exposure_sitecol()
    else:  # we are in a basic calculator
        self.read_exposure_sitecol()
        self.read_sources()
    self.datastore.hdf5.flush()
def pre_execute(self):
    """
    Check if there is a pre_calculator or a previous calculation ID.
    If yes, read the inputs by invoking the precalculator or by retrieving
    the previous calculation; if not, read the inputs directly.
    """
    if self.pre_calculator is None:
        # basic calculator: read the inputs directly
        self.read_risk_data()
        self.read_sources()
    else:
        # hazard_calculation_id is only meaningful with a precalculator
        precalc_id = self.oqparam.hazard_calculation_id
        if precalc_id is None:
            # nothing precomputed: run the precalculator from scratch
            precalc = calculators[self.pre_calculator](
                self.oqparam, self.monitor('precalculator'),
                self.datastore.calc_id)
            precalc.run()
            if 'scenario' not in self.oqparam.calculation_mode:
                self.csm = precalc.csm
        else:
            # attach the previous calculation as the parent datastore
            self.datastore.set_parent(datastore.DataStore(precalc_id))
            # update oqparam with the attributes saved in the datastore
            self.oqparam = OqParam.from_(self.datastore.attrs)
        self.read_risk_data()
    self.datastore.hdf5.flush()
def __init__(self, oqparam, monitor=None, calc_id=None):
    """
    Initialize the calculator.

    :param oqparam: an OqParam instance with the calculation parameters
    :param monitor: a Monitor instance (a fresh one is built by default)
    :param calc_id: numeric calculation ID (optional)
    """
    # the original signature used `monitor=Monitor()`, a single instance
    # shared by every default-constructed calculator; since the monitor
    # is mutated below (calc_id, hdf5path) each instance needs its own
    self.monitor = Monitor() if monitor is None else monitor
    self.datastore = datastore.DataStore(calc_id)
    self.monitor.calc_id = self.datastore.calc_id
    self.monitor.hdf5path = self.datastore.hdf5path
    self.datastore.export_dir = oqparam.export_dir
    self.oqparam = oqparam
def build_report(job_ini, output_dir=None):
    """
    Write a `report.csv` file with information about the calculation.

    :param job_ini: full pathname of the job.ini file
    :param output_dir: the directory where the report is written
        (default the input directory)
    :returns: the full pathname of the generated report
    """
    oq = readinput.get_oqparam(job_ini)
    output_dir = output_dir or os.path.dirname(job_ini)
    calc = base.calculators(oq)
    calc.pre_execute()
    ds = datastore.DataStore(calc.datastore.calc_id)
    rw = ReportWriter(ds)
    # always-present sections
    for section in ('params', 'inputs'):
        rw.add(section)
    # hazard-model sections, absent for scenario calculations
    if 'scenario' not in oq.calculation_mode:
        rw.add('csm_info')
        rw.add('rlzs_assoc', calc.rlzs_assoc)
    if 'num_ruptures' in ds:
        rw.add('rupture_collections')
        rw.add('col_rlz_assocs')
    if oq.calculation_mode in ('classical', 'event_based', 'ebr'):
        rw.add('data_transfer')
    report = os.path.join(output_dir, 'report.rst')
    rw.save(report)
    return report
def export(calc_id, datastore_key, format='csv', export_dir='.'):
    """
    Export an output from the datastore.

    :param calc_id: numeric calculation ID
    :param datastore_key: the datastore key to export
    :param format: comma-separated list of export formats (default 'csv')
    :param export_dir: directory where the files are written (default '.')
    """
    logging.basicConfig(level=logging.INFO)
    ds = datastore.DataStore(calc_id)
    ds.export_dir = export_dir
    parent_id = ds['oqparam'].hazard_calculation_id
    if parent_id:
        ds.parent = datastore.DataStore(parent_id)
    with performance.Monitor('export', measuremem=True) as mon:
        for fmt in format.split(','):
            fnames = export_((datastore_key, fmt), ds)
            nbytes = sum(map(os.path.getsize, fnames))
            print('Exported %s in %s' % (general.humansize(nbytes), fnames))
    if mon.duration > 1:
        print(mon)
def purge(calc_id):
    """
    Remove the given calculation. If calc_id is 0, remove all calculations.
    """
    if calc_id:
        # remove the single HDF5 file of the given calculation
        path = datastore.DataStore(calc_id).hdf5path
        os.remove(path)
        print('Removed %s' % path)
    else:
        # wipe the whole datastore directory
        shutil.rmtree(datastore.DATADIR)
        print('Removed %s' % datastore.DATADIR)
def purge(calc_id):
    """
    Remove the given calculation. If calc_id is 0, remove all calculations.
    """
    if calc_id:
        # remove the directory of the given calculation
        path = datastore.DataStore(calc_id).calc_dir
        shutil.rmtree(path)
        print('Removed %s' % path)
    else:
        # wipe the whole datastore directory
        shutil.rmtree(datastore.DATADIR)
        print('Removed %s' % datastore.DATADIR)
def __init__(self, oqparam, monitor=None, calc_id=None, persistent=True):
    """
    Initialize the calculator.

    :param oqparam: an OqParam instance with the calculation parameters
    :param monitor: a monitor instance (a fresh DummyMonitor by default)
    :param calc_id: numeric calculation ID (optional)
    :param persistent: if True, store data in a DataStore on disk;
        otherwise use an in-memory AccumDict
    """
    # the original signature used `monitor=DummyMonitor()`, evaluated once
    # at def time and therefore shared by every default-constructed
    # calculator; build a fresh monitor per instance instead
    self.monitor = DummyMonitor() if monitor is None else monitor
    if persistent:
        self.datastore = datastore.DataStore(calc_id)
    else:
        # emulate the DataStore interface with plain dicts
        self.datastore = general.AccumDict()
        self.datastore.hdf5 = {}
        self.datastore.attrs = {}
    self.datastore.export_dir = oqparam.export_dir
    self.oqparam = oqparam
    self.persistent = persistent
def __init__(self, oqparam, monitor=None, calc_id=None, persistent=True):
    """
    Initialize the calculator.

    :param oqparam: an OqParam instance with the calculation parameters
    :param monitor: a monitor instance (a fresh DummyMonitor by default)
    :param calc_id: numeric calculation ID (optional)
    :param persistent: if True, store data in a DataStore on disk;
        otherwise use an in-memory AccumDict
    """
    # the original signature used `monitor=DummyMonitor()`, evaluated once
    # at def time and therefore shared by every default-constructed
    # calculator; build a fresh monitor per instance instead
    self.monitor = DummyMonitor() if monitor is None else monitor
    if persistent:
        self.datastore = datastore.DataStore(calc_id)
    else:
        # emulate the DataStore interface with plain dicts
        self.datastore = general.AccumDict()
        self.datastore.hdf5 = {}
    self.datastore.export_dir = oqparam.export_dir
    if 'oqparam' not in self.datastore:  # new datastore
        self.oqparam = oqparam
    # else we are doing a precalculation; oqparam has been already stored
    self.persistent = persistent
def show_attrs(calc_id, key):
    """
    Show the attributes of a HDF5 dataset in the datastore.

    :param calc_id: numeric calculation ID
    :param key: key of the datastore
    """
    ds = datastore.DataStore(calc_id)
    try:
        attrs = ds[key].attrs
    except KeyError:
        # the dataset does not exist in this datastore
        print('%r is not in %s' % (key, ds))
        return
    if not attrs:
        print('%s has no attributes' % key)
    for name, value in attrs.items():
        print(name, value)
def build_report(job_ini, output_dir=None):
    """
    Write a `report.csv` file with information about the calculation.

    :param job_ini: full pathname of the job.ini file
    :param output_dir: the directory where the report is written
        (default the input directory)
    :returns: the full pathname of the generated report
    """
    oq = readinput.get_oqparam(job_ini)
    output_dir = output_dir or os.path.dirname(job_ini)
    calc = base.calculators(oq)
    calc.pre_execute()
    calc.save_params()
    ds = datastore.DataStore(calc.datastore.calc_id)
    rw = ReportWriter(ds)
    rw.make_report()
    if output_dir:
        report = os.path.join(output_dir, 'report.rst')
    else:
        report = ds.export_path('report.rst')
    try:
        rw.save(report)
    except IOError as exc:  # permission error
        sys.stderr.write(str(exc) + '\n')
    return report
def __init__(self, oqparam, monitor=None, calc_id=None):
    """
    Initialize the calculator.

    :param oqparam: an OqParam instance with the calculation parameters
    :param monitor: a Monitor instance (a fresh one is built by default)
    :param calc_id: numeric calculation ID (optional)
    """
    # the original signature used `monitor=Monitor()`, a single instance
    # evaluated at def time and shared by every default-constructed
    # calculator; build a fresh monitor per instance instead
    self._monitor = Monitor() if monitor is None else monitor
    self.datastore = datastore.DataStore(calc_id)
    self.oqparam = oqparam
def get_datastore(calc):
    """
    Return a fresh DataStore for the given calculator, with the parent
    datastore attached when a hazard_calculation_id is present.
    """
    store = datastore.DataStore(calc.datastore.calc_id)
    parent_id = store['oqparam'].hazard_calculation_id
    if parent_id:
        store.parent = datastore.DataStore(int(parent_id))
    return store
def get_datastore(calc):
    """
    Return a fresh DataStore for the given calculator, with the parent
    datastore attached when a hazard_calculation_id attribute is present.
    """
    store = datastore.DataStore(calc.datastore.calc_id)
    parent_id = store.attrs.get('hazard_calculation_id')
    if parent_id:
        store.parent = datastore.DataStore(int(parent_id))
    return store
def show(calc_id, key=None, rlzs=None):
    """
    Show the content of a datastore.

    :param calc_id: numeric calculation ID; if 0, show all calculations
    :param key: key of the datastore
    :param rlzs: flag; if given, print out the realizations in order
    """
    if calc_id == 0:  # show all
        if not os.path.exists(datastore.DATADIR):
            return
        rows = []
        for calc_id in datastore.get_calc_ids(datastore.DATADIR):
            try:
                ds = datastore.DataStore(calc_id, mode='r')
                oq = OqParam.from_(ds.attrs)
                cmode, descr = oq.calculation_mode, oq.description
            except Exception:
                # invalid datastore file, or missing calculation_mode
                # and description attributes, perhaps due to a manual kill;
                # narrowed from a bare `except:` so that KeyboardInterrupt
                # does not trigger the file removal below
                logging.warn('Removed invalid calculation %d', calc_id)
                os.remove(
                    os.path.join(datastore.DATADIR, 'calc_%s.hdf5' % calc_id))
                continue
            else:
                rows.append((calc_id, cmode, descr))
            ds.close()
        for row in sorted(rows, key=lambda row: row[0]):  # by calc_id
            print('#%d %s: %s' % row)
        return
    ds = datastore.DataStore(calc_id, mode='r')
    if key:
        if key in datastore.view:
            print(datastore.view(key, ds))
            return
        obj = ds[key]
        if hasattr(obj, 'value'):  # an array
            print(write_csv(io.StringIO(), obj.value))
        else:
            print(obj)
        return
    oq = OqParam.from_(ds.attrs)
    # this part is experimental
    if rlzs and 'hcurves' in ds:
        min_value = 0.01  # used in rmsep
        curves_by_rlz, mean_curves = get_hcurves_and_means(ds)
        dists = []
        for rlz, curves in curves_by_rlz.items():
            dist = sum(rmsep(mean_curves[imt], curves[imt], min_value)
                       for imt in mean_curves.dtype.fields)
            dists.append((dist, rlz))
        print('Realizations in order of distance from the mean curves')
        for dist, rlz in sorted(dists):
            print('%s: rmsep=%s' % (rlz, dist))
    else:  # print all keys
        print(oq.calculation_mode, 'calculation (%r) saved in %s contains:' %
              (oq.description, ds.hdf5path))
        for key in ds:
            print(key, humansize(ds.getsize(key)))