def read_hparams(job_ini):
    """
    :param job_ini: path to a job.ini file
    :returns: dictionary of hazard parameters
    """
    ini_dir = os.path.dirname(job_ini)

    def _cast(key, raw):
        # Convert a raw string value into a typed parameter; returns the
        # (possibly renamed) key together with the converted value.
        if key == 'intensity_measure_types_and_levels':
            return 'imtls', valid.intensity_measure_types_and_levels(raw)
        if key == 'maximum_distance':
            return key, IntegrationDistance.new(raw)
        if key == 'sites':
            return key, valid.coordinates(raw)
        if key.endswith('_file'):
            # file parameters are relative to the job.ini directory
            return key, os.path.join(ini_dir, raw)
        try:
            return key, ast.literal_eval(raw)
        except (SyntaxError, ValueError):
            pass
        # lowercase booleans are the job.ini convention
        if raw == 'true':
            return key, True
        if raw == 'false':
            return key, False
        return key, raw

    conf = configparser.ConfigParser()
    conf.read([job_ini], encoding='utf8')
    params = {}
    for section in conf.sections():
        for key, raw in conf.items(section):
            new_key, value = _cast(key, raw)
            params[new_key] = value
    return params
def get_mesh(oqparam):
    """
    Extract the mesh of points to compute from the sites,
    the sites_csv, or the region.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :returns:
        a :class:`openquake.hazardlib.geo.Mesh` instance (or None if no
        source of sites is configured)
    """
    global pmap, exposure, gmfs, eids
    if 'exposure' in oqparam.inputs and exposure is None:
        # read it only once
        exposure = get_exposure(oqparam)
    if oqparam.sites:
        return geo.Mesh.from_coords(oqparam.sites)
    elif 'sites' in oqparam.inputs:
        # NB: mode 'U' was removed in Python 3.11; universal newlines are
        # the default in text mode, and `with` avoids leaking the handle
        with open(oqparam.inputs['sites']) as f:
            csv_data = f.readlines()
        has_header = csv_data[0].startswith('site_id')
        if has_header:  # strip site_id
            data = []
            for i, line in enumerate(csv_data[1:]):
                row = line.replace(',', ' ').split()
                sid = row[0]
                if sid != str(i):
                    raise InvalidFile('%s: expected site_id=%d, got %s' %
                                      (oqparam.inputs['sites'], i, sid))
                data.append(' '.join(row[1:]))
        elif 'gmfs' in oqparam.inputs:
            # gmfs.csv refers to sites by site_id, so the header is required
            raise InvalidFile('Missing header in %(sites)s' % oqparam.inputs)
        else:
            data = [line.replace(',', ' ') for line in csv_data]
        coords = valid.coordinates(','.join(data))
        start, stop = oqparam.sites_slice
        c = coords[start:stop] if has_header else sorted(coords[start:stop])
        # TODO: sort=True below would break a lot of tests :-(
        return geo.Mesh.from_coords(c, sort=False)
    elif 'hazard_curves' in oqparam.inputs:
        fname = oqparam.inputs['hazard_curves']
        if fname.endswith('.csv'):
            mesh, pmap = get_pmap_from_csv(oqparam, fname)
        elif fname.endswith('.xml'):
            mesh, pmap = get_pmap_from_nrml(oqparam, fname)
        else:
            raise NotImplementedError('Reading from %s' % fname)
        return mesh
    elif 'gmfs' in oqparam.inputs:
        eids, gmfs = _get_gmfs(oqparam)  # sets oqparam.sites
        return geo.Mesh.from_coords(oqparam.sites)
    elif oqparam.region and oqparam.region_grid_spacing:
        poly = geo.Polygon.from_wkt(oqparam.region)
        try:
            mesh = poly.discretize(oqparam.region_grid_spacing)
            return geo.Mesh.from_coords(zip(mesh.lons, mesh.lats))
        except Exception:
            raise ValueError(
                'Could not discretize region %(region)s with grid spacing '
                '%(region_grid_spacing)s' % vars(oqparam))
    elif 'exposure' in oqparam.inputs:
        return exposure.mesh
def get_mesh(oqparam):
    """
    Extract the mesh of points to compute from the sites,
    the sites_csv, or the region.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :returns:
        a :class:`openquake.hazardlib.geo.Mesh` instance (or None when the
        mesh comes from the exposure via get_sitecol_assetcol)
    """
    if oqparam.sites:
        return geo.Mesh.from_coords(sorted(oqparam.sites))
    elif 'sites' in oqparam.inputs:
        # NB: mode 'U' was removed in Python 3.11; universal newlines are
        # the default in text mode, and `with` avoids leaking the handle
        with open(oqparam.inputs['sites']) as f:
            csv_data = f.readlines()
        has_header = csv_data[0].startswith('site_id')
        if has_header:  # strip site_id
            data = []
            for i, line in enumerate(csv_data[1:]):
                row = line.replace(',', ' ').split()
                sid = row[0]
                if sid != str(i):
                    raise InvalidFile('%s: expected site_id=%d, got %s' % (
                        oqparam.inputs['sites'], i, sid))
                data.append(' '.join(row[1:]))
        elif oqparam.calculation_mode == 'gmf_ebrisk':
            # gmf_ebrisk refers to sites by site_id: the header is required
            raise InvalidFile('Missing header in %(sites)s' % oqparam.inputs)
        else:
            data = [line.replace(',', ' ') for line in csv_data]
        coords = valid.coordinates(','.join(data))
        start, stop = oqparam.sites_slice
        c = coords[start:stop] if has_header else sorted(coords[start:stop])
        return geo.Mesh.from_coords(c)
    elif oqparam.region:
        # close the linear polygon ring by appending the first
        # point to the end
        firstpoint = geo.Point(*oqparam.region[0])
        points = [geo.Point(*xy) for xy in oqparam.region] + [firstpoint]
        try:
            mesh = geo.Polygon(points).discretize(oqparam.region_grid_spacing)
            lons, lats = zip(*sorted(zip(mesh.lons, mesh.lats)))
            return geo.Mesh(numpy.array(lons), numpy.array(lats))
        except Exception:
            # NB: a bare `except:` here would also swallow KeyboardInterrupt
            raise ValueError(
                'Could not discretize region %(region)s with grid spacing '
                '%(region_grid_spacing)s' % vars(oqparam))
    elif oqparam.hazard_calculation_id:
        sitecol = datastore.read(oqparam.hazard_calculation_id)['sitecol']
        return geo.Mesh(sitecol.lons, sitecol.lats, sitecol.depths)
    elif 'exposure' in oqparam.inputs:
        # the mesh is extracted from get_sitecol_assetcol
        return
    elif 'site_model' in oqparam.inputs:
        coords = [(param.lon, param.lat, param.depth)
                  for param in get_site_model(oqparam)]
        mesh = geo.Mesh.from_coords(sorted(coords))
        mesh.from_site_model = True
        return mesh
def get_gmfs_from_txt(oqparam, fname):
    """
    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param fname:
        the full path of the CSV file
    :returns:
        a composite array of shape (N, R) read from a CSV file with format
        `etag indices [gmv1 ... gmvN] * num_imts`
    :raises InvalidFile:
        if the header, the indices, the gmvs or the number/order of the
        rows are malformed
    """
    with open(fname) as csvfile:
        firstline = next(csvfile)
        try:
            coords = valid.coordinates(firstline)
        except Exception:
            # NB: was a bare `except:`, which would also hide SystemExit;
            # the message also lacked a space ("separatedordered")
            raise InvalidFile(
                'The first line of %s is expected to contain comma separated '
                'ordered coordinates, got %s instead' % (fname, firstline))
        lons, lats, depths = zip(*coords)
        sitecol = site.SiteCollection.from_points(lons, lats, depths, oqparam)
        if not oqparam.imtls:
            oqparam.set_risk_imtls(get_risk_models(oqparam))
        imts = list(oqparam.imtls)
        imt_dt = numpy.dtype([(imt, F32) for imt in imts])
        num_gmfs = oqparam.number_of_ground_motion_fields
        gmf_by_imt = numpy.zeros((num_gmfs, len(sitecol)), imt_dt)
        etags = []
        lineno = 1  # in case the file contains no data rows at all
        for lineno, line in enumerate(csvfile, 2):
            row = line.split(',')
            try:
                indices = list(map(valid.positiveint, row[1].split()))
            except Exception:
                raise InvalidFile(
                    'The second column in %s is expected to contain integer '
                    'indices, got %s' % (fname, row[1]))
            r_sites = (sitecol if not indices else
                       site.FilteredSiteCollection(indices, sitecol))
            for i in range(len(imts)):
                try:
                    # NB: i + 2 because the first 2 fields are etag and indices
                    array = numpy.array(valid.positivefloats(row[i + 2]))
                except Exception:
                    raise InvalidFile(
                        'The column #%d in %s is expected to contain positive '
                        'floats, got %s instead' % (i + 3, fname, row[i + 2]))
                gmf_by_imt[imts[i]][lineno - 2][r_sites.sids] = array
            etags.append(row[0])
    if lineno < num_gmfs + 1:
        raise InvalidFile('%s contains %d rows, expected %d' %
                          (fname, lineno, num_gmfs + 1))
    if etags != sorted(etags):
        raise InvalidFile('The etags in %s are not ordered: %s' %
                          (fname, etags))
    return sitecol, numpy.array([encode(e) for e in etags]), gmf_by_imt.T
def get_mesh(oqparam):
    """
    Extract the mesh of points to compute from the sites,
    the sites_csv, or the region.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :returns:
        a :class:`openquake.hazardlib.geo.Mesh` instance (or None when the
        mesh comes from the exposure via get_sitecol_assetcol)
    """
    if oqparam.sites:
        return geo.Mesh.from_coords(oqparam.sites)
    elif 'sites' in oqparam.inputs:
        # NB: mode 'U' was removed in Python 3.11; universal newlines are
        # the default in text mode, and `with` avoids leaking the handle
        with open(oqparam.inputs['sites']) as f:
            csv_data = f.read()
        coords = valid.coordinates(
            csv_data.strip().replace(',', ' ').replace('\n', ','))
        start, stop = oqparam.sites_slice
        return geo.Mesh.from_coords(coords[start:stop])
    elif oqparam.region:
        # close the linear polygon ring by appending the first
        # point to the end
        firstpoint = geo.Point(*oqparam.region[0])
        points = [geo.Point(*xy) for xy in oqparam.region] + [firstpoint]
        try:
            mesh = geo.Polygon(points).discretize(oqparam.region_grid_spacing)
            lons, lats = zip(*sorted(zip(mesh.lons, mesh.lats)))
            return geo.Mesh(numpy.array(lons), numpy.array(lats))
        except Exception:
            # NB: a bare `except:` here would also swallow KeyboardInterrupt
            raise ValueError(
                'Could not discretize region %(region)s with grid spacing '
                '%(region_grid_spacing)s' % vars(oqparam))
    elif 'gmfs' in oqparam.inputs:
        return get_gmfs(oqparam)[0].mesh
    elif oqparam.hazard_calculation_id:
        sitecol = datastore.read(oqparam.hazard_calculation_id)['sitecol']
        return geo.Mesh(sitecol.lons, sitecol.lats, sitecol.depths)
    elif 'exposure' in oqparam.inputs:
        # the mesh is extracted from get_sitecol_assetcol
        return
    elif 'site_model' in oqparam.inputs:
        coords = [(param.lon, param.lat, param.depth)
                  for param in get_site_model(oqparam)]
        mesh = geo.Mesh.from_coords(coords)
        mesh.from_site_model = True
        return mesh
def get_mesh(oqparam, h5=None):
    """
    Extract the mesh of points to compute from the sites,
    the sites_csv, the region, the site model, the exposure in this order.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param h5: an open datastore where to save the site model, if any
    :returns:
        a :class:`openquake.hazardlib.geo.Mesh` instance (or None when the
        mesh comes from the exposure via get_sitecol_assetcol)
    """
    global pmap, exposure, gmfs, eids
    if 'exposure' in oqparam.inputs and exposure is None:
        # read it only once
        exposure = get_exposure(oqparam)
    if oqparam.sites:
        return geo.Mesh.from_coords(oqparam.sites)
    elif 'sites' in oqparam.inputs:
        fname = oqparam.inputs['sites']
        header = get_csv_header(fname)
        if 'lon' in header:
            data = []
            # NB: the context manager closes the handle, which was
            # previously leaked
            with open(fname, encoding='utf-8-sig') as f:
                for i, row in enumerate(csv.DictReader(f)):
                    if header[0] == 'site_id' and row['site_id'] != str(i):
                        raise InvalidFile(
                            '%s: expected site_id=%d, got %s' % (
                                fname, i, row['site_id']))
                    data.append(' '.join([row['lon'], row['lat']]))
        elif 'gmfs' in oqparam.inputs:
            # gmfs.csv refers to sites by site_id, so the header is required
            raise InvalidFile('Missing header in %(sites)s' % oqparam.inputs)
        else:
            with open(fname, encoding='utf-8-sig') as f:
                data = [line.replace(',', ' ') for line in f]
        coords = valid.coordinates(','.join(data))
        start, stop = oqparam.sites_slice
        c = (coords[start:stop] if header[0] == 'site_id'
             else sorted(coords[start:stop]))
        # NB: Notice the sort=False below.
        # Calculations starting from ground motion fields input by the user
        # require at least two input files related to the gmf data:
        # 1. A sites.csv file, listing {site_id, lon, lat} tuples
        # 2. A gmfs.csv file, listing {event_id, site_id, gmv[IMT1],
        #    gmv[IMT2], ...} tuples
        # The site coordinates defined in the sites file do not need to be
        # in sorted order; we must only ensure uniqueness of the provided
        # site_ids and coordinates. sort=False maintains the user-specified
        # site_ids instead of reassigning them after sorting.
        return geo.Mesh.from_coords(c, sort=False)
    elif 'hazard_curves' in oqparam.inputs:
        fname = oqparam.inputs['hazard_curves']
        if isinstance(fname, list):  # for csv
            mesh, pmap = get_pmap_from_csv(oqparam, fname)
        else:
            raise NotImplementedError('Reading from %s' % fname)
        return mesh
    elif oqparam.region_grid_spacing:
        if oqparam.region:
            poly = geo.Polygon.from_wkt(oqparam.region)
        elif exposure:
            # in case of implicit grid the exposure takes precedence over
            # the site model
            poly = exposure.mesh.get_convex_hull()
        elif 'site_model' in oqparam.inputs:
            # this happens in event_based/case_19, where there is an implicit
            # grid over the site model
            sm = get_site_model(oqparam)  # do not store in h5!
            poly = geo.Mesh(sm['lon'], sm['lat']).get_convex_hull()
        else:
            raise InvalidFile('There is a grid spacing but not a region, '
                              'nor a site model, nor an exposure in %s' %
                              oqparam.inputs['job_ini'])
        try:
            logging.info('Inferring the hazard grid')
            mesh = poly.dilate(oqparam.region_grid_spacing).discretize(
                oqparam.region_grid_spacing)
            return geo.Mesh.from_coords(zip(mesh.lons, mesh.lats))
        except Exception:
            raise ValueError(
                'Could not discretize region with grid spacing '
                '%(region_grid_spacing)s' % vars(oqparam))
    # the site model has the precedence over the exposure, see the
    # discussion in https://github.com/gem/oq-engine/pull/5217
    elif 'site_model' in oqparam.inputs:
        logging.info('Extracting the hazard sites from the site model')
        sm = get_site_model(oqparam)
        if h5:
            h5['site_model'] = sm
        mesh = geo.Mesh(sm['lon'], sm['lat'])
        return mesh
    elif 'exposure' in oqparam.inputs:
        return exposure.mesh
def __init__(self, **names_vals):
    """
    Build and validate an OqParam instance from the given parameters.

    :param names_vals:
        calculation parameters (typically read from a job.ini file);
        when '_job_id' is present the attributes are assumed to be
        mostly pre-validated and only a few fields are re-parsed
    :raises InvalidFile:
        for inconsistent or missing parameters in the job.ini
    :raises ValueError:
        for invalid parameter values or unknown input keys
    """
    if '_job_id' in names_vals:  # assume most attributes already validated
        vars(self).update(names_vals)
        # re-parse the few fields stored as strings in the database
        if 'hazard_calculation_id' in names_vals:
            self.hazard_calculation_id = int(
                names_vals['hazard_calculation_id'])
        if 'maximum_distance' in names_vals:
            self.maximum_distance = valid.MagDepDistance.new(
                str(names_vals['maximum_distance']))
        if 'pointsource_distance' in names_vals:
            self.pointsource_distance = valid.MagDepDistance.new(
                str(names_vals['pointsource_distance']))
        if 'region_constraint' in names_vals:
            self.region = valid.wkt_polygon(
                names_vals['region_constraint'])
        if 'minimum_magnitude' in names_vals:
            self.minimum_magnitude = valid.floatdict(
                str(names_vals['minimum_magnitude']))
        if 'minimum_intensity' in names_vals:
            self.minimum_intensity = valid.floatdict(
                str(names_vals['minimum_intensity']))
        if 'sites' in names_vals:
            self.sites = valid.coordinates(names_vals['sites'])
        return
    # support legacy names
    for name in list(names_vals):
        if name == 'quantile_hazard_curves':
            names_vals['quantiles'] = names_vals.pop(name)
        elif name == 'mean_hazard_curves':
            names_vals['mean'] = names_vals.pop(name)
        elif name == 'max':
            # NOTE(review): this branch maps 'max' onto itself, so it is
            # a no-op; presumably kept as a placeholder — confirm
            names_vals['max'] = names_vals.pop(name)
    super().__init__(**names_vals)
    if 'job_ini' not in self.inputs:
        self.inputs['job_ini'] = '<in-memory>'
    job_ini = self.inputs['job_ini']
    if 'calculation_mode' not in names_vals:
        raise InvalidFile('Missing calculation_mode in %s' % job_ini)
    if 'region_constraint' in names_vals:
        if 'region' in names_vals:
            raise InvalidFile('You cannot have both region and '
                              'region_constraint in %s' % job_ini)
        logging.warning(
            'region_constraint is obsolete, use region instead')
        self.region = valid.wkt_polygon(
            names_vals.pop('region_constraint'))
    # the hazard investigation_time is the default for the risk one
    self.risk_investigation_time = (self.risk_investigation_time or
                                    self.investigation_time)
    self.collapse_level = int(self.collapse_level)
    if ('intensity_measure_types_and_levels' in names_vals and
            'intensity_measure_types' in names_vals):
        logging.warning('Ignoring intensity_measure_types since '
                        'intensity_measure_types_and_levels is set')
    # determine hazard_imtls from iml_disagg, or
    # intensity_measure_types_and_levels, or intensity_measure_types
    if 'iml_disagg' in names_vals:
        self.iml_disagg.pop('default')
        # normalize things like SA(0.10) -> SA(0.1)
        self.iml_disagg = {str(from_string(imt)): val
                           for imt, val in self.iml_disagg.items()}
        self.hazard_imtls = self.iml_disagg
        if 'intensity_measure_types_and_levels' in names_vals:
            raise InvalidFile(
                'Please remove the intensity_measure_types_and_levels '
                'from %s: they will be inferred from the iml_disagg '
                'dictionary' % job_ini)
    elif 'intensity_measure_types_and_levels' in names_vals:
        self.hazard_imtls = self.intensity_measure_types_and_levels
        delattr(self, 'intensity_measure_types_and_levels')
        lens = set(map(len, self.hazard_imtls.values()))
        if len(lens) > 1:
            dic = {imt: len(ls) for imt, ls in self.hazard_imtls.items()}
            raise ValueError(
                'Each IMT must have the same number of levels, instead '
                'you have %s' % dic)
    elif 'intensity_measure_types' in names_vals:
        self.hazard_imtls = dict.fromkeys(self.intensity_measure_types)
        if 'maximum_intensity' in names_vals:
            # build 20 logarithmically spaced levels per IMT
            minint = self.minimum_intensity or {'default': 1E-2}
            for imt in self.hazard_imtls:
                i1 = calc.filters.getdefault(minint, imt)
                i2 = calc.filters.getdefault(self.maximum_intensity, imt)
                self.hazard_imtls[imt] = list(valid.logscale(i1, i2, 20))
        delattr(self, 'intensity_measure_types')
    self._risk_files = get_risk_files(self.inputs)
    if self.hazard_precomputed() and self.job_type == 'risk':
        self.check_missing('site_model', 'debug')
        self.check_missing('gsim_logic_tree', 'debug')
        self.check_missing('source_model_logic_tree', 'debug')
    # check investigation_time
    if (self.investigation_time and
            self.calculation_mode.startswith('scenario')):
        raise ValueError('%s: there cannot be investigation_time in %s'
                         % (self.inputs['job_ini'], self.calculation_mode))
    # check the gsim_logic_tree
    if self.inputs.get('gsim_logic_tree'):
        if self.gsim != '[FromFile]':
            raise InvalidFile('%s: if `gsim_logic_tree_file` is set, there'
                              ' must be no `gsim` key' % job_ini)
        path = os.path.join(self.base_path, self.inputs['gsim_logic_tree'])
        gsim_lt = logictree.GsimLogicTree(path, ['*'])
        # check the IMTs vs the GSIMs
        self._trts = set(gsim_lt.values)
        for gsims in gsim_lt.values.values():
            self.check_gsims(gsims)
    elif self.gsim is not None:
        self.check_gsims([valid.gsim(self.gsim, self.base_path)])
    # check inputs
    unknown = set(self.inputs) - self.KNOWN_INPUTS
    if unknown:
        raise ValueError('Unknown key %s_file in %s' %
                         (unknown.pop(), self.inputs['job_ini']))
    # checks for disaggregation
    if self.calculation_mode == 'disaggregation':
        # poes and poes_disagg default to each other, but if both are
        # given they must agree
        if not self.poes_disagg and self.poes:
            self.poes_disagg = self.poes
        elif not self.poes and self.poes_disagg:
            self.poes = self.poes_disagg
        elif self.poes != self.poes_disagg:
            raise InvalidFile(
                'poes_disagg != poes: %s!=%s in %s' %
                (self.poes_disagg, self.poes, self.inputs['job_ini']))
        if not self.poes_disagg and not self.iml_disagg:
            raise InvalidFile('poes_disagg or iml_disagg must be set '
                              'in %(job_ini)s' % self.inputs)
        elif self.poes_disagg and self.iml_disagg:
            raise InvalidFile(
                '%s: iml_disagg and poes_disagg cannot be set '
                'at the same time' % job_ini)
        for k in ('mag_bin_width', 'distance_bin_width',
                  'coordinate_bin_width', 'num_epsilon_bins'):
            if k not in vars(self):
                raise InvalidFile('%s must be set in %s' % (k, job_ini))
        if self.disagg_outputs and not any(
                'Eps' in out for out in self.disagg_outputs):
            # no epsilon output requested: a single epsilon bin suffices
            self.num_epsilon_bins = 1
        if (self.rlz_index is not None
                and self.num_rlzs_disagg is not None):
            raise InvalidFile('%s: you cannot set rlzs_index and '
                              'num_rlzs_disagg at the same time' % job_ini)
    # checks for classical_damage
    if self.calculation_mode == 'classical_damage':
        if self.conditional_loss_poes:
            raise InvalidFile('%s: conditional_loss_poes are not defined '
                              'for classical_damage calculations' % job_ini)
    # checks for event_based_risk
    if (self.calculation_mode == 'event_based_risk'
            and self.asset_correlation not in (0, 1)):
        raise ValueError('asset_correlation != {0, 1} is no longer'
                         ' supported')
    # checks for ebrisk
    if self.calculation_mode == 'ebrisk':
        if self.risk_investigation_time is None:
            raise InvalidFile('Please set the risk_investigation_time in'
                              ' %s' % job_ini)
    # check for GMFs from file
    if (self.inputs.get('gmfs', '').endswith('.csv')
            and 'sites' not in self.inputs and self.sites is None):
        raise InvalidFile('%s: You forgot sites|sites_csv' % job_ini)
    elif self.inputs.get('gmfs', '').endswith('.xml'):
        raise InvalidFile('%s: GMFs in XML are not supported anymore' %
                          job_ini)
    # checks for event_based
    if 'event_based' in self.calculation_mode:
        if self.ses_per_logic_tree_path >= TWO32:
            raise ValueError('ses_per_logic_tree_path too big: %d' %
                             self.ses_per_logic_tree_path)
        if self.number_of_logic_tree_samples >= TWO16:
            raise ValueError('number_of_logic_tree_samples too big: %d' %
                             self.number_of_logic_tree_samples)
    # check grid + sites
    if self.region_grid_spacing and ('sites' in self.inputs or self.sites):
        raise ValueError('You are specifying grid and sites at the same '
                         'time: which one do you want?')
    # check for amplification
    if ('amplification' in self.inputs and self.imtls and
            self.calculation_mode in ['classical', 'classical_risk',
                                      'disaggregation']):
        check_same_levels(self.imtls)
def get_mesh(oqparam):
    """
    Extract the mesh of points to compute from the sites,
    the sites_csv, or the region.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :returns:
        a :class:`openquake.hazardlib.geo.Mesh` instance (or None when the
        mesh comes from the exposure)
    """
    global pmap, exposure, gmfs, eids
    if 'exposure' in oqparam.inputs and exposure is None:
        # read it only once
        exposure = get_exposure(oqparam)
    if oqparam.sites:
        return geo.Mesh.from_coords(oqparam.sites)
    elif 'sites' in oqparam.inputs:
        fname = oqparam.inputs['sites']
        header = get_csv_header(fname)
        if 'lon' in header:
            data = []
            # NB: the context manager closes the handle, which was
            # previously leaked
            with open(fname, encoding='utf-8-sig') as f:
                for i, row in enumerate(csv.DictReader(f)):
                    if header[0] == 'site_id' and row['site_id'] != str(i):
                        raise InvalidFile('%s: expected site_id=%d, got %s' % (
                            fname, i, row['site_id']))
                    data.append(' '.join([row['lon'], row['lat']]))
        elif 'gmfs' in oqparam.inputs:
            # gmfs.csv refers to sites by site_id, so the header is required
            raise InvalidFile('Missing header in %(sites)s' % oqparam.inputs)
        else:
            with open(fname, encoding='utf-8-sig') as f:
                data = [line.replace(',', ' ') for line in f]
        coords = valid.coordinates(','.join(data))
        start, stop = oqparam.sites_slice
        c = (coords[start:stop] if header[0] == 'site_id'
             else sorted(coords[start:stop]))
        return geo.Mesh.from_coords(c)
    elif 'hazard_curves' in oqparam.inputs:
        fname = oqparam.inputs['hazard_curves']
        if isinstance(fname, list):  # for csv
            mesh, pmap = get_pmap_from_csv(oqparam, fname)
        else:
            raise NotImplementedError('Reading from %s' % fname)
        return mesh
    elif oqparam.region_grid_spacing:
        if oqparam.region:
            poly = geo.Polygon.from_wkt(oqparam.region)
        elif 'site_model' in oqparam.inputs:
            # implicit grid over the site model
            sm = get_site_model(oqparam)
            poly = geo.Mesh(sm['lon'], sm['lat']).get_convex_hull()
        elif exposure:
            # implicit grid over the exposure
            poly = exposure.mesh.get_convex_hull()
        else:
            raise InvalidFile('There is a grid spacing but not a region, '
                              'nor a site model, nor an exposure in %s' %
                              oqparam.inputs['job_ini'])
        try:
            mesh = poly.dilate(oqparam.region_grid_spacing).discretize(
                oqparam.region_grid_spacing)
            return geo.Mesh.from_coords(zip(mesh.lons, mesh.lats))
        except Exception:
            raise ValueError(
                'Could not discretize region with grid spacing '
                '%(region_grid_spacing)s' % vars(oqparam))
    elif 'exposure' in oqparam.inputs:
        return exposure.mesh
def get_mesh(oqparam):
    """
    Extract the mesh of points to compute from the sites,
    the sites_csv, or the region.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :returns:
        a :class:`openquake.hazardlib.geo.Mesh` instance (or None when the
        mesh comes from the exposure)
    """
    global pmap, exposure, gmfs, eids
    if 'exposure' in oqparam.inputs and exposure is None:
        # read it only once
        exposure = get_exposure(oqparam)
    if oqparam.sites:
        return geo.Mesh.from_coords(oqparam.sites)
    elif 'sites' in oqparam.inputs:
        fname = oqparam.inputs['sites']
        header = get_csv_header(fname)
        if 'lon' in header:
            data = []
            # NB: the context manager closes the handle, which was
            # previously leaked
            with open(fname, encoding='utf-8-sig') as f:
                for i, row in enumerate(csv.DictReader(f)):
                    if header[0] == 'site_id' and row['site_id'] != str(i):
                        raise InvalidFile('%s: expected site_id=%d, got %s'
                                          % (fname, i, row['site_id']))
                    data.append(' '.join([row['lon'], row['lat']]))
        elif 'gmfs' in oqparam.inputs:
            # gmfs.csv refers to sites by site_id, so the header is required
            raise InvalidFile('Missing header in %(sites)s' % oqparam.inputs)
        else:
            with open(fname, encoding='utf-8-sig') as f:
                data = [line.replace(',', ' ') for line in f]
        coords = valid.coordinates(','.join(data))
        start, stop = oqparam.sites_slice
        c = (coords[start:stop] if header[0] == 'site_id'
             else sorted(coords[start:stop]))
        return geo.Mesh.from_coords(c)
    elif 'hazard_curves' in oqparam.inputs:
        fname = oqparam.inputs['hazard_curves']
        if isinstance(fname, list):  # for csv
            mesh, pmap = get_pmap_from_csv(oqparam, fname)
        else:
            raise NotImplementedError('Reading from %s' % fname)
        return mesh
    elif oqparam.region_grid_spacing:
        if oqparam.region:
            poly = geo.Polygon.from_wkt(oqparam.region)
        elif 'site_model' in oqparam.inputs:
            # implicit grid over the site model
            sm = get_site_model(oqparam)
            poly = geo.Mesh(sm['lon'], sm['lat']).get_convex_hull()
        elif exposure:
            # implicit grid over the exposure
            poly = exposure.mesh.get_convex_hull()
        else:
            raise InvalidFile('There is a grid spacing but not a region, '
                              'nor a site model, nor an exposure in %s' %
                              oqparam.inputs['job_ini'])
        try:
            mesh = poly.dilate(oqparam.region_grid_spacing).discretize(
                oqparam.region_grid_spacing)
            return geo.Mesh.from_coords(zip(mesh.lons, mesh.lats))
        except Exception:
            raise ValueError(
                'Could not discretize region with grid spacing '
                '%(region_grid_spacing)s' % vars(oqparam))
    elif 'exposure' in oqparam.inputs:
        return exposure.mesh