def get_mesh(oqparam):
    """
    Extract the mesh of points to compute from the sites,
    the sites_csv, or the region.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    """
    if oqparam.sites:
        # sites listed directly in the job configuration
        lons, lats = zip(*sorted(oqparam.sites))
        return geo.Mesh(numpy.array(lons), numpy.array(lats))
    elif 'sites' in oqparam.inputs:
        # sites given in a CSV file; use a context manager so the file
        # handle is not leaked ('U' mode was removed in Python 3.11,
        # universal newlines are the default in text mode anyway)
        with open(oqparam.inputs['sites']) as f:
            csv_data = f.read()
        coords = valid.coordinates(
            csv_data.strip().replace(',', ' ').replace('\n', ','))
        lons, lats = zip(*sorted(coords))
        return geo.Mesh(numpy.array(lons), numpy.array(lats))
    elif oqparam.region:
        # close the linear polygon ring by appending the first
        # point to the end
        firstpoint = geo.Point(*oqparam.region[0])
        points = [geo.Point(*xy) for xy in oqparam.region] + [firstpoint]
        try:
            mesh = geo.Polygon(points).discretize(
                oqparam.region_grid_spacing)
            lons, lats = zip(*sorted(zip(mesh.lons, mesh.lats)))
            return geo.Mesh(numpy.array(lons), numpy.array(lats))
        except Exception:
            # narrow from a bare `except:` which also swallowed
            # SystemExit/KeyboardInterrupt
            raise ValueError(
                'Could not discretize region %(region)s with grid spacing '
                '%(region_grid_spacing)s' % vars(oqparam))
    elif 'site_model' in oqparam.inputs:
        coords = [(param.lon, param.lat)
                  for param in get_site_model(oqparam)]
        lons, lats = zip(*sorted(coords))
        return geo.Mesh(numpy.array(lons), numpy.array(lats))
def get_mesh(oqparam):
    """
    Extract the mesh of points to compute from the sites,
    the sites_csv, or the region.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    """
    if oqparam.sites:
        return geo.Mesh.from_coords(sorted(oqparam.sites))
    elif 'sites' in oqparam.inputs:
        # read the CSV with a context manager to avoid leaking the file
        # handle; 'U' mode was removed in Python 3.11 and universal
        # newlines are the default in text mode
        with open(oqparam.inputs['sites']) as f:
            csv_data = f.readlines()
        has_header = csv_data[0].startswith('site_id')
        if has_header:  # strip site_id
            data = []
            for i, line in enumerate(csv_data[1:]):
                row = line.replace(',', ' ').split()
                sid = row[0]
                # the site ids must be 0, 1, 2, ... in file order
                if sid != str(i):
                    raise InvalidFile('%s: expected site_id=%d, got %s' % (
                        oqparam.inputs['sites'], i, sid))
                data.append(' '.join(row[1:]))
        elif oqparam.calculation_mode == 'gmf_ebrisk':
            # gmf_ebrisk requires explicit site ids to match the GMFs
            raise InvalidFile('Missing header in %(sites)s' % oqparam.inputs)
        else:
            data = [line.replace(',', ' ') for line in csv_data]
        coords = valid.coordinates(','.join(data))
        start, stop = oqparam.sites_slice
        # with a header the user-given order is kept, otherwise sort
        c = coords[start:stop] if has_header else sorted(coords[start:stop])
        return geo.Mesh.from_coords(c)
    elif oqparam.region:
        # close the linear polygon ring by appending the first
        # point to the end
        firstpoint = geo.Point(*oqparam.region[0])
        points = [geo.Point(*xy) for xy in oqparam.region] + [firstpoint]
        try:
            mesh = geo.Polygon(points).discretize(
                oqparam.region_grid_spacing)
            lons, lats = zip(*sorted(zip(mesh.lons, mesh.lats)))
            return geo.Mesh(numpy.array(lons), numpy.array(lats))
        except Exception:
            # narrow from a bare `except:` which also swallowed
            # SystemExit/KeyboardInterrupt
            raise ValueError(
                'Could not discretize region %(region)s with grid spacing '
                '%(region_grid_spacing)s' % vars(oqparam))
    elif oqparam.hazard_calculation_id:
        # reuse the sites of a previous hazard calculation
        sitecol = datastore.read(oqparam.hazard_calculation_id)['sitecol']
        return geo.Mesh(sitecol.lons, sitecol.lats, sitecol.depths)
    elif 'exposure' in oqparam.inputs:
        # the mesh is extracted from get_sitecol_assetcol
        return
    elif 'site_model' in oqparam.inputs:
        coords = [(param.lon, param.lat, param.depth)
                  for param in get_site_model(oqparam)]
        mesh = geo.Mesh.from_coords(sorted(coords))
        mesh.from_site_model = True
        return mesh
def get_pmap_from_csv(oqparam, fnames):
    """
    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param fnames:
        a space-separated list of .csv relative filenames
    :returns:
        the site mesh and the hazard curves read by the .csv files
    """
    # one CSV per IMT; each wrapper has .imt, .array and .dtype attributes
    read = functools.partial(hdf5.read_csv, dtypedict={None: float})
    imtls = {}
    dic = {}
    for wrapper in map(read, fnames):
        dic[wrapper.imt] = wrapper.array
        imtls[wrapper.imt] = levels_from(wrapper.dtype.names)
    oqparam.hazard_imtls = imtls
    oqparam.set_risk_imts(get_risk_functions(oqparam))
    # NB: `wrapper` is the one from the LAST file; this assumes all the
    # files list the same sites, which is verified by the sanity check below
    array = wrapper.array
    mesh = geo.Mesh(array['lon'], array['lat'])
    num_levels = sum(len(imls) for imls in oqparam.imtls.values())
    data = numpy.zeros((len(mesh), num_levels))
    level = 0
    # fill the columns IMT by IMT; the first 3 CSV fields are
    # lon, lat, depth, the remaining ones are the PoE columns
    for im in oqparam.imtls:
        arr = dic[im]
        for poe in arr.dtype.names[3:]:
            data[:, level] = arr[poe]
            level += 1
        for field in ('lon', 'lat', 'depth'):  # sanity check
            numpy.testing.assert_equal(arr[field], array[field])
    return mesh, ProbabilityMap.from_array(data, range(len(mesh)))
def get_hcurves_from_nrml(oqparam, fname):
    """
    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param fname:
        an XML file containing hazard curves
    :returns:
        sitecol, curve array
    """
    hcurves_by_imt = {}
    # imtls is filled incrementally and also stored on oqparam
    oqparam.hazard_imtls = imtls = {}
    for hcurves in nrml.read(fname):
        imt = hcurves['IMT']
        oqparam.investigation_time = hcurves['investigationTime']
        if imt == 'SA':
            # qualify spectral acceleration with its period, e.g. SA(0.1)
            imt += '(%s)' % hcurves['saPeriod']
        imtls[imt] = ~hcurves.IMLs
        data = []
        for node in hcurves[1:]:  # the first child is the IMLs node
            xy = ~node.Point.pos
            poes = ~node.poEs
            data.append((xy, poes))
        data.sort()  # order the curves by (lon, lat)
        hcurves_by_imt[imt] = numpy.array([d[1] for d in data])
        n = len(hcurves_by_imt[imt])
    # NB: `n` and `data` from the LAST IMT are reused below; this assumes
    # every IMT in the file lists the same sites — TODO confirm upstream
    curves = zero_curves(n, imtls)
    for imt in imtls:
        curves[imt] = hcurves_by_imt[imt]
    lons, lats = [], []
    for xy, poes in data:
        lons.append(xy[0])
        lats.append(xy[1])
    mesh = geo.Mesh(numpy.array(lons), numpy.array(lats))
    sitecol = get_site_collection(oqparam, mesh)
    return sitecol, curves
def get_pmap_from_nrml(oqparam, fname):
    """
    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param fname:
        an XML file containing hazard curves
    :returns:
        sitecol, curve array
    """
    hcurves_by_imt = {}
    # an OrderedDict keeps the IMTs in file order; also stored on oqparam
    oqparam.hazard_imtls = imtls = collections.OrderedDict()
    for hcurves in nrml.read(fname):
        imt = hcurves['IMT']
        oqparam.investigation_time = hcurves['investigationTime']
        if imt == 'SA':
            # qualify spectral acceleration with its period, e.g. SA(0.1)
            imt += '(%s)' % hcurves['saPeriod']
        imtls[imt] = ~hcurves.IMLs
        # sort the (position, poes) pairs by position; hcurves[1:] skips
        # the IMLs node
        data = sorted((~node.Point.pos, ~node.poEs) for node in hcurves[1:])
        hcurves_by_imt[imt] = numpy.array([d[1] for d in data])
    # NB: `data` from the LAST IMT is reused here; this assumes every IMT
    # in the file lists the same sites — TODO confirm upstream
    lons, lats = [], []
    for xy, poes in data:
        lons.append(xy[0])
        lats.append(xy[1])
    mesh = geo.Mesh(numpy.array(lons), numpy.array(lats))
    sitecol = get_site_collection(oqparam, mesh)
    num_levels = sum(len(v) for v in imtls.values())
    array = numpy.zeros((len(sitecol), num_levels))
    # rebind imtls to a DictArray to get per-IMT column slices
    imtls = DictArray(imtls)
    for imt_ in hcurves_by_imt:
        array[:, imtls.slicedic[imt_]] = hcurves_by_imt[imt_]
    return sitecol, ProbabilityMap.from_array(array, sitecol.sids)
def get_pmap_from_csv(oqparam, fnames):
    """
    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param fnames:
        a space-separated list of .csv relative filenames
    :returns:
        the site mesh and the hazard curves read by the .csv files
    """
    if not oqparam.imtls:
        # derive the IMTs and levels from the risk models
        oqparam.set_risk_imtls(get_risk_models(oqparam))
    if not oqparam.imtls:
        raise ValueError('Missing intensity_measure_types_and_levels in %s'
                         % oqparam.inputs['job_ini'])
    # one composite array per IMT, keyed by IMT
    dic = {wrapper.imt: wrapper.array
           for wrapper in map(writers.read_composite_array, fnames)}
    # take any one of the arrays for the site coordinates; the sanity
    # check below verifies that all files list the same sites
    array = dic[next(iter(dic))]
    mesh = geo.Mesh(array['lon'], array['lat'])
    num_levels = sum(len(imls) for imls in oqparam.imtls.values())
    data = numpy.zeros((len(mesh), num_levels))
    level = 0
    # fill the columns IMT by IMT; the first 3 fields are lon, lat, depth,
    # the remaining ones are the PoE columns
    for im in oqparam.imtls:
        arr = dic[im]
        for poe in arr.dtype.names[3:]:
            data[:, level] = arr[poe]
            level += 1
        for field in ('lon', 'lat', 'depth'):  # sanity check
            numpy.testing.assert_equal(arr[field], array[field])
    return mesh, ProbabilityMap.from_array(data, range(len(mesh)))
def test_point_depth(self):
    """Distances from a point at 10 km depth to a depth-less mesh."""
    buried = geo.Point(0, 0, 10)
    surface = geo.Mesh(numpy.array([0.1, 0.2, 0.3, 0.4]),
                       numpy.array([0., 0., 0., 0.]),
                       depths=None)
    expected = [14.95470217, 24.38385672, 34.82510666, 45.58826465]
    numpy.testing.assert_array_almost_equal(
        buried.distance_to_mesh(surface), expected)
def test_mesh_depth(self):
    """Distances from a surface point to mesh points at growing depths."""
    surface_point = geo.Point(0.5, -0.5)
    column = geo.Mesh(numpy.array([0.5] * 4),
                      numpy.array([-0.5] * 4),
                      numpy.array([0., 1., 2., 3.]))
    numpy.testing.assert_array_almost_equal(
        surface_point.distance_to_mesh(column), [0, 1, 2, 3])
def test_both_topo(self):
    """Point and mesh both above sea level (negative depths)."""
    elevated = geo.Point(0.5, -0.5, -1)
    topo_mesh = geo.Mesh(numpy.array([0.5] * 4),
                         numpy.array([-0.5] * 4),
                         numpy.array([-1., -2, -3., -4.]))
    numpy.testing.assert_array_almost_equal(
        elevated.distance_to_mesh(topo_mesh), [0., 1., 2., 3.])
def test_point_topo(self):
    """Distances from a point 5 km above sea level to a depth-less mesh."""
    elevated = geo.Point(0, 0, -5)
    surface = geo.Mesh(numpy.array([0.1, 0.2, 0.3, 0.4]),
                       numpy.array([0., 0., 0., 0.]),
                       depths=None)
    expected = [12.19192836, 22.79413233, 33.73111403, 44.75812634]
    numpy.testing.assert_array_almost_equal(
        elevated.distance_to_mesh(surface), expected)
def test_neglect_depths(self):
    """with_depths=False must ignore both the point and the mesh depths."""
    deep_point = geo.Point(0.5, -0.5, 10)
    topo_mesh = geo.Mesh(numpy.array([0.5] * 4),
                         numpy.array([-0.5] * 4),
                         numpy.array([0., -1., -2., -3.]))
    distances = deep_point.distance_to_mesh(topo_mesh, with_depths=False)
    numpy.testing.assert_array_almost_equal(distances, [0, 0, 0, 0])
def test_both_depths(self):
    """Point and mesh both buried at assorted depths."""
    hypo = geo.Point(3, 7, 9)
    deep_mesh = geo.Mesh(numpy.array([2.9, 2.9, 3., 3., 3.1, 3.1]),
                         numpy.array([7., 7.1, 6.9, 7.1, 6.8, 7.2]),
                         numpy.array([20., 30., 10., 20., 40., 50.]))
    expected = [15.58225761, 26.19968783, 11.16436819,
                15.64107148, 39.71688472, 47.93043417]
    numpy.testing.assert_array_almost_equal(
        hypo.distance_to_mesh(deep_mesh), expected)
def get_mesh(oqparam):
    """
    Extract the mesh of points to compute from the sites,
    the sites_csv, or the region.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    """
    if oqparam.sites:
        return geo.Mesh.from_coords(oqparam.sites)
    elif 'sites' in oqparam.inputs:
        # read the CSV with a context manager so the handle is not
        # leaked; 'U' mode was removed in Python 3.11 and universal
        # newlines are the default in text mode
        with open(oqparam.inputs['sites']) as f:
            csv_data = f.read()
        coords = valid.coordinates(
            csv_data.strip().replace(',', ' ').replace('\n', ','))
        start, stop = oqparam.sites_slice
        return geo.Mesh.from_coords(coords[start:stop])
    elif oqparam.region:
        # close the linear polygon ring by appending the first
        # point to the end
        firstpoint = geo.Point(*oqparam.region[0])
        points = [geo.Point(*xy) for xy in oqparam.region] + [firstpoint]
        try:
            mesh = geo.Polygon(points).discretize(
                oqparam.region_grid_spacing)
            lons, lats = zip(*sorted(zip(mesh.lons, mesh.lats)))
            return geo.Mesh(numpy.array(lons), numpy.array(lats))
        except Exception:
            # narrow from a bare `except:` which also swallowed
            # SystemExit/KeyboardInterrupt
            raise ValueError(
                'Could not discretize region %(region)s with grid spacing '
                '%(region_grid_spacing)s' % vars(oqparam))
    elif 'gmfs' in oqparam.inputs:
        # take the sites of the provided ground motion fields
        return get_gmfs(oqparam)[0].mesh
    elif oqparam.hazard_calculation_id:
        # reuse the sites of a previous hazard calculation
        sitecol = datastore.read(oqparam.hazard_calculation_id)['sitecol']
        return geo.Mesh(sitecol.lons, sitecol.lats, sitecol.depths)
    elif 'exposure' in oqparam.inputs:
        # the mesh is extracted from get_sitecol_assetcol
        return
    elif 'site_model' in oqparam.inputs:
        coords = [(param.lon, param.lat, param.depth)
                  for param in get_site_model(oqparam)]
        mesh = geo.Mesh.from_coords(coords)
        mesh.from_site_model = True
        return mesh
def test_no_depths(self):
    """Distance matrix against a 2D mesh without depths."""
    p = geo.Point(20, 30)
    grid = geo.Mesh(numpy.array([[18., 19., 20.]] * 3),
                    numpy.array([[29.] * 3, [30.] * 3, [31.] * 3]),
                    depths=None)
    expected = [[223.21812393, 147.4109544, 111.19492664],
                [192.59281778, 96.29732568, 0],
                [221.53723588, 146.77568123, 111.19492664]]
    numpy.testing.assert_array_almost_equal(
        p.distance_to_mesh(grid, with_depths=False), expected)
def test_mesh_depth(self):
    """closer_than must account for the depth of the mesh points."""
    surface_point = geo.Point(0.5, -0.5)
    column = geo.Mesh(numpy.array([0.5] * 4),
                      numpy.array([-0.5] * 4),
                      numpy.array([0., 1., 2., 3.]))
    for radius, expected in [(0.1, [1, 0, 0, 0]),
                             (1.5, [1, 1, 0, 0]),
                             (3, [1, 1, 1, 1])]:
        numpy.testing.assert_array_equal(
            surface_point.closer_than(column, radius), expected)
def test_point_depth(self):
    """closer_than must account for the depth of the point."""
    buried = geo.Point(0, 0, 10)
    surface = geo.Mesh(numpy.array([0.1, 0.2, 0.3, 0.4]),
                       numpy.array([0., 0., 0., 0.]),
                       depths=None)
    for radius, expected in [(30, [1, 1, 0, 0]),
                             (35, [1, 1, 1, 0]),
                             (15, [1, 0, 0, 0])]:
        numpy.testing.assert_array_equal(
            buried.closer_than(surface, radius), expected)
def test_no_depths(self):
    """closer_than on a 2D depth-less mesh returns a boolean 2D array."""
    p = geo.Point(20, 30)
    grid = geo.Mesh(numpy.array([[18., 19., 20., 21., 22.]] * 3),
                    numpy.array([[29] * 5, [30] * 5, [31] * 5]),
                    depths=None)
    within_120 = p.closer_than(grid, 120)
    self.assertEqual(within_120.dtype, bool)
    numpy.testing.assert_array_equal(within_120, [[0, 0, 1, 0, 0],
                                                  [0, 1, 1, 1, 0],
                                                  [0, 0, 1, 0, 0]])
    within_100 = p.closer_than(grid, 100)
    numpy.testing.assert_array_equal(within_100, [[0, 0, 0, 0, 0],
                                                  [0, 1, 1, 1, 0],
                                                  [0, 0, 0, 0, 0]])
def test_both_depths(self):
    """closer_than with both the point and the mesh buried."""
    hypo = geo.Point(3, 7, 9)
    deep_mesh = geo.Mesh(numpy.array([2.9, 2.9, 3., 3., 3.1, 3.1]),
                         numpy.array([7., 7.1, 6.9, 7.1, 6.8, 7.2]),
                         numpy.array([20., 30., 10., 20., 40., 50.]))
    for radius, expected in [(20, [1, 0, 1, 1, 0, 0]),
                             (40, [1, 1, 1, 1, 1, 0]),
                             (10, [0, 0, 0, 0, 0, 0]),
                             (60, [1, 1, 1, 1, 1, 1])]:
        numpy.testing.assert_array_equal(
            hypo.closer_than(deep_mesh, radius), expected)
def test_both_topo(self):
    """closer_than with point and mesh above sea level (negative depths)."""
    elevated = geo.Point(3, 7, -1)
    topo_mesh = geo.Mesh(numpy.array([2.9, 2.9, 3., 3., 3.1, 3.1]),
                         numpy.array([7., 7.1, 6.9, 7.1, 6.8, 7.2]),
                         numpy.array([-2., -3., -1., -2., -4., -5.]))
    for radius, expected in [(10, [0, 0, 0, 0, 0, 0]),
                             (15, [1, 0, 1, 1, 0, 0]),
                             (20, [1, 1, 1, 1, 0, 0]),
                             (30, [1, 1, 1, 1, 1, 1])]:
        numpy.testing.assert_array_equal(
            elevated.closer_than(topo_mesh, radius), expected)
def _get_sitecol(hparams, req_site_params):
    """
    Build a SiteCollection from the given hazard parameters.

    :param hparams: a dictionary of hazard parameters
    """
    if 'sites' in hparams:
        # explicit coordinates: the site parameters are taken from the
        # hparams themselves, wrapped in a Mock acting as a site model
        site_model = unittest.mock.Mock(**hparams)
        mesh = geo.Mesh.from_coords(hparams['sites'])
    elif 'site_model_file' in hparams:
        site_model = _get_site_model(
            hparams['site_model_file'], req_site_params)
        mesh = geo.Mesh(site_model['lon'], site_model['lat'])
    else:
        raise KeyError('Missing sites or site_model_file')
    return site.SiteCollection.from_points(
        mesh.lons, mesh.lats, mesh.depths, site_model, req_site_params)
def get_mesh_assets_by_site(oqparam, exposure):
    """
    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param exposure:
        an Exposure instance
    :returns:
        the exposure `mesh` and a list `assets_by_site` with the same length
    """
    by_location = groupby(exposure, key=lambda a: a.location)
    lons, lats = zip(*sorted(by_location))
    mesh = geo.Mesh(numpy.array(lons), numpy.array(lats))
    # one sorted bucket of assets per mesh point, ordered by asset index
    assets_by_site = [
        sorted(by_location[lonlat], key=operator.attrgetter('idx'))
        for lonlat in zip(mesh.lons, mesh.lats)]
    return mesh, assets_by_site
def get_sitecol_assets(oqparam, exposure):
    """
    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :returns:
        two sequences of the same length: the site collection and an
        array with the assets per each site, collected by taxonomy
    """
    by_location = groupby(exposure.assets, key=lambda a: a.location)
    lons, lats = zip(*sorted(by_location))
    sitecol = get_site_collection(
        oqparam, geo.Mesh(numpy.array(lons), numpy.array(lats)))
    # one sorted bucket of assets per site, ordered by asset id
    assets_by_site = [
        sorted(by_location[lonlat], key=operator.attrgetter('id'))
        for lonlat in zip(sitecol.lons, sitecol.lats)]
    return sitecol, numpy.array(assets_by_site)
def get_mesh_csvdata(csvfile, imts, num_values, validvalues):
    """
    Read CSV data in the format `IMT lon lat value1 ... valueN`.

    :param csvfile:
        a file or file-like object with the CSV data
    :param imts:
        a list of intensity measure types
    :param num_values:
        dictionary with the number of expected values per IMT
    :param validvalues:
        validation function for the values
    :returns:
        the mesh of points and the data as a dictionary
        imt -> array of curves for each site
    """
    number_of_values = dict(zip(imts, num_values))
    lon_lats = {imt: set() for imt in imts}
    data = AccumDict()  # imt -> list of arrays
    check_imt = valid.Choice(*imts)
    for line, row in enumerate(csv.reader(csvfile, delimiter=' '), 1):
        try:
            imt = check_imt(row[0])
            lon_lat = valid.longitude(row[1]), valid.latitude(row[2])
            if lon_lat in lon_lats[imt]:
                # the same point must not appear twice for the same IMT
                raise DuplicatedPoint(lon_lat)
            lon_lats[imt].add(lon_lat)
            values = validvalues(' '.join(row[3:]))
            if len(values) != number_of_values[imt]:
                raise ValueError('Found %d values, expected %d' %
                                 (len(values), number_of_values[imt]))
        except (ValueError, DuplicatedPoint) as err:
            # re-raise the same exception class with the file/line context
            raise err.__class__('%s: file %s, line %d' % (err, csvfile, line))
        data += {imt: [numpy.array(values)]}
    # every IMT must list exactly the same set of points
    points = lon_lats.pop(imts[0])
    for other_imt, other_points in lon_lats.items():
        if points != other_points:
            raise ValueError('Inconsistent locations between %s and %s' %
                             (imts[0], other_imt))
    lons, lats = zip(*sorted(points))
    mesh = geo.Mesh(numpy.array(lons), numpy.array(lats))
    return mesh, {imt: numpy.array(lst) for imt, lst in data.items()}
def get_sitecol_assetcol(oqparam, exposure):
    """
    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :returns:
        the site collection and the asset collection
    """
    by_location = groupby(exposure.assets, key=lambda a: a.location)
    lons, lats = zip(*sorted(by_location))
    sitecol = get_site_collection(
        oqparam, geo.Mesh(numpy.array(lons), numpy.array(lats)))
    # one sorted bucket of assets per site, ordered by asset index
    assets_by_site = [
        sorted(by_location[lonlat], key=operator.attrgetter('idx'))
        for lonlat in zip(sitecol.lons, sitecol.lats)]
    assetcol = riskinput.AssetCollection(
        assets_by_site, exposure.assets_by_tag, exposure.cost_calculator,
        oqparam.time_event,
        time_events=hdf5.array_of_vstr(sorted(exposure.time_events)))
    return sitecol, assetcol
def convert_multiPointSource(self, node):
    """
    Convert the given node into a MultiPointSource object.

    :param node: a node with tag multiPointGeometry
    :returns: a :class:`openquake.hazardlib.source.MultiPointSource`
    """
    geom = node.multiPointGeometry
    # build the mesh of point sources from the flat coordinate list
    lons, lats = zip(*split_coords_2d(~geom.posList))
    point_mesh = geo.Mesh(F32(lons), F32(lats))
    scaling_rel = valid.SCALEREL[~node.magScaleRel]()
    return source.MultiPointSource(
        source_id=node['id'],
        name=node['name'],
        tectonic_region_type=node.attrib.get('tectonicRegion'),
        mfd=self.convert_mfdist(node),
        magnitude_scaling_relationship=scaling_rel,
        rupture_aspect_ratio=~node.ruptAspectRatio,
        upper_seismogenic_depth=~geom.upperSeismoDepth,
        lower_seismogenic_depth=~geom.lowerSeismoDepth,
        nodal_plane_distribution=self.convert_npdist(node),
        hypocenter_distribution=self.convert_hpdist(node),
        mesh=point_mesh,
        temporal_occurrence_model=self.get_tom(node))
def get_mesh_hcurves(oqparam):
    """
    Read CSV data in the format `lon lat, v1-vN, w1-wN, ...`.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :returns:
        the mesh of points and the data as a dictionary
        imt -> array of curves for each site
    """
    imtls = oqparam.imtls
    lon_lats = set()
    data = AccumDict()  # imt -> list of arrays
    ncols = len(imtls) + 1  # lon_lat + curve_per_imt ...
    csvfile = oqparam.inputs['hazard_curves']
    # BUG FIX: the path must be opened; csv.reader over the path string
    # would iterate it character by character
    with open(csvfile) as f:
        for line, row in enumerate(csv.reader(f), 1):
            try:
                if len(row) != ncols:
                    # BUG FIX: the two values must be a tuple; previously
                    # only ncols was formatted and len(row) was passed as a
                    # second argument to ValueError
                    raise ValueError('Expected %d columns, found %d' %
                                     (ncols, len(row)))
                x, y = row[0].split()
                lon_lat = valid.longitude(x), valid.latitude(y)
                if lon_lat in lon_lats:
                    raise DuplicatedPoint(lon_lat)
                lon_lats.add(lon_lat)
                for i, imt_ in enumerate(imtls, 1):
                    values = valid.decreasing_probabilities(row[i])
                    if len(values) != len(imtls[imt_]):
                        # BUG FIX: imtls is a dict-like and must be indexed,
                        # not called (imtls([imt_]) raised TypeError)
                        raise ValueError('Found %d values, expected %d' %
                                         (len(values), len(imtls[imt_])))
                    data += {imt_: [numpy.array(values)]}
            except (ValueError, DuplicatedPoint) as err:
                # re-raise the same exception class with file/line context
                raise err.__class__('%s: file %s, line %d' %
                                    (err, csvfile, line))
    lons, lats = zip(*sorted(lon_lats))
    mesh = geo.Mesh(numpy.array(lons), numpy.array(lats))
    return mesh, {imt: numpy.array(lst) for imt, lst in data.items()}
def get_mesh(oqparam, h5=None):
    """
    Extract the mesh of points to compute from the sites,
    the sites_csv, the region, the site model, the exposure in
    this order.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param h5:
        optional open datastore where the site model may be saved
    """
    global pmap, exposure, gmfs, eids
    if 'exposure' in oqparam.inputs and exposure is None:
        # read it only once
        exposure = get_exposure(oqparam)
    if oqparam.sites:
        return geo.Mesh.from_coords(oqparam.sites)
    elif 'sites' in oqparam.inputs:
        fname = oqparam.inputs['sites']
        header = get_csv_header(fname)
        if 'lon' in header:
            data = []
            for i, row in enumerate(
                    csv.DictReader(open(fname, encoding='utf-8-sig'))):
                # with a site_id column the ids must be 0, 1, ... in order
                if header[0] == 'site_id' and row['site_id'] != str(i):
                    raise InvalidFile('%s: expected site_id=%d, got %s' % (
                        fname, i, row['site_id']))
                data.append(' '.join([row['lon'], row['lat']]))
        elif 'gmfs' in oqparam.inputs:
            raise InvalidFile('Missing header in %(sites)s' % oqparam.inputs)
        else:
            data = [line.replace(',', ' ')
                    for line in open(fname, encoding='utf-8-sig')]
        coords = valid.coordinates(','.join(data))
        start, stop = oqparam.sites_slice
        c = (coords[start:stop] if header[0] == 'site_id'
             else sorted(coords[start:stop]))
        # NB: Notice the sort=False below
        # Calculations starting from ground motion fields input by the user
        # require at least two input files related to the gmf data:
        # 1. A sites.csv file, listing {site_id, lon, lat} tuples
        # 2. A gmfs.csv file, listing {event_id, site_id, gmv[IMT1],
        #    gmv[IMT2], ...} tuples
        # The site coordinates defined in the sites file do not need to be
        # in sorted order.
        # We must only ensure uniqueness of the provided site_ids and
        # coordinates.
        # When creating the site mesh from the site coordinates read from
        # the csv file, the sort=False flag maintains the user-specified
        # site_ids instead of reassigning them after sorting.
        return geo.Mesh.from_coords(c, sort=False)
    elif 'hazard_curves' in oqparam.inputs:
        fname = oqparam.inputs['hazard_curves']
        if isinstance(fname, list):  # for csv
            mesh, pmap = get_pmap_from_csv(oqparam, fname)
        else:
            raise NotImplementedError('Reading from %s' % fname)
        return mesh
    elif oqparam.region_grid_spacing:
        if oqparam.region:
            poly = geo.Polygon.from_wkt(oqparam.region)
        elif exposure:
            # in case of implicit grid the exposure takes precedence over
            # the site model
            poly = exposure.mesh.get_convex_hull()
        elif 'site_model' in oqparam.inputs:
            # this happens in event_based/case_19, where there is an
            # implicit grid over the site model
            sm = get_site_model(oqparam)  # do not store in h5!
            poly = geo.Mesh(sm['lon'], sm['lat']).get_convex_hull()
        else:
            raise InvalidFile('There is a grid spacing but not a region, '
                              'nor a site model, nor an exposure in %s' %
                              oqparam.inputs['job_ini'])
        try:
            logging.info('Inferring the hazard grid')
            mesh = poly.dilate(oqparam.region_grid_spacing).discretize(
                oqparam.region_grid_spacing)
            return geo.Mesh.from_coords(zip(mesh.lons, mesh.lats))
        except Exception:
            raise ValueError(
                'Could not discretize region with grid spacing '
                '%(region_grid_spacing)s' % vars(oqparam))
    # the site model has the precedence over the exposure, see the
    # discussion in https://github.com/gem/oq-engine/pull/5217
    elif 'site_model' in oqparam.inputs:
        logging.info('Extracting the hazard sites from the site model')
        sm = get_site_model(oqparam)
        if h5:
            h5['site_model'] = sm
        mesh = geo.Mesh(sm['lon'], sm['lat'])
        return mesh
    elif 'exposure' in oqparam.inputs:
        return exposure.mesh
def get_mesh(oqparam):
    """
    Extract the mesh of points to compute from the sites,
    the sites_csv, or the region.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    """
    global pmap, exposure, gmfs, eids
    if 'exposure' in oqparam.inputs and exposure is None:
        # read it only once
        exposure = get_exposure(oqparam)
    if oqparam.sites:
        return geo.Mesh.from_coords(oqparam.sites)
    elif 'sites' in oqparam.inputs:
        fname = oqparam.inputs['sites']
        header = get_csv_header(fname)
        if 'lon' in header:
            data = []
            for i, row in enumerate(
                    csv.DictReader(open(fname, encoding='utf-8-sig'))):
                # with a site_id column the ids must be 0, 1, ... in order
                if header[0] == 'site_id' and row['site_id'] != str(i):
                    raise InvalidFile('%s: expected site_id=%d, got %s' %
                                      (fname, i, row['site_id']))
                data.append(' '.join([row['lon'], row['lat']]))
        elif 'gmfs' in oqparam.inputs:
            raise InvalidFile('Missing header in %(sites)s' % oqparam.inputs)
        else:
            data = [line.replace(',', ' ')
                    for line in open(fname, encoding='utf-8-sig')]
        coords = valid.coordinates(','.join(data))
        start, stop = oqparam.sites_slice
        # with an explicit site_id column the user order is kept,
        # otherwise the coordinates are sorted
        c = (coords[start:stop] if header[0] == 'site_id'
             else sorted(coords[start:stop]))
        return geo.Mesh.from_coords(c)
    elif 'hazard_curves' in oqparam.inputs:
        fname = oqparam.inputs['hazard_curves']
        if isinstance(fname, list):  # for csv
            mesh, pmap = get_pmap_from_csv(oqparam, fname)
        else:
            raise NotImplementedError('Reading from %s' % fname)
        return mesh
    elif oqparam.region_grid_spacing:
        # implicit grid: region > site model > exposure
        if oqparam.region:
            poly = geo.Polygon.from_wkt(oqparam.region)
        elif 'site_model' in oqparam.inputs:
            sm = get_site_model(oqparam)
            poly = geo.Mesh(sm['lon'], sm['lat']).get_convex_hull()
        elif exposure:
            poly = exposure.mesh.get_convex_hull()
        else:
            raise InvalidFile('There is a grid spacing but not a region, '
                              'nor a site model, nor an exposure in %s' %
                              oqparam.inputs['job_ini'])
        try:
            mesh = poly.dilate(oqparam.region_grid_spacing).discretize(
                oqparam.region_grid_spacing)
            return geo.Mesh.from_coords(zip(mesh.lons, mesh.lats))
        except Exception:
            raise ValueError('Could not discretize region with grid spacing '
                             '%(region_grid_spacing)s' % vars(oqparam))
    elif 'exposure' in oqparam.inputs:
        return exposure.mesh