def graticule(bounds, step, crs_or_method=None):
    """
    Draw graticules.

    Args:
        bounds (tuple): In WGS84 coordinates.
        step (int): Distance between graticule lines, in the output projection.
        crs_or_method (str): A projection specification.

    Returns:
        A generator that yields GeoJSON-like dicts of graticule features.
    """
    if crs_or_method == 'file':
        raise errors.SvgisError("'file' is not a valid option for projecting graticules.")

    if crs_or_method:
        out_crs = projection.pick(crs_or_method, bounds=bounds, file_crs=utils.DEFAULT_GEOID)
        unproject = Transformer.from_crs(utils.DEFAULT_GEOID, out_crs,
                                         skip_equivalent=True, always_xy=True)
        bounds = bounding.transform(bounds, transformer=unproject)
    else:
        unproject = Transformer.from_crs(4269, 4269, always_xy=True, skip_equivalent=True)

    minx, miny, maxx, maxy = bounds
    minx, miny = utils.modfloor(minx, step), utils.modfloor(miny, step)
    maxx, maxy = utils.modceil(maxx, step), utils.modceil(maxy, step)

    frange = partial(utils.frange, cover=True)

    i = 0
    for i, X in enumerate(frange(minx, maxx + step, step), 1):
        coords = unproject.itransform([
            (X, y) for y in frange(miny, maxy + step, step / 2.0)
        ])
        yield _feature(i, coords, axis='x', coord=X)

    for i, Y in enumerate(frange(miny, maxy + step, step), i + 1):
        coords = unproject.itransform([
            (x, Y) for x in frange(minx, maxx + step, step / 2.0)
        ])
        yield _feature(i, coords, axis='y', coord=Y)
def geo_bounds(geometries):
    """Calculate bounds in WGS84 coordinates for each geometry.

    As a faster approximation, only the bounding coordinates are projected to
    WGS84 before calculating the outer bounds.  Coordinates are rounded to
    5 decimal places.

    Parameters
    ----------
    geometries : ndarray of pygeos geometries

    Returns
    -------
    ndarray of (xmin, ymin, xmax, ymax) for each geometry
    """
    transformer = Transformer.from_crs(CRS, GEO_CRS, always_xy=True)
    xmin, ymin, xmax, ymax = pg.bounds(geometries).T

    # transform all 4 corners, then take min/max
    x1, y1 = transformer.transform(xmin, ymin)
    x2, y2 = transformer.transform(xmin, ymax)
    x3, y3 = transformer.transform(xmax, ymin)
    x4, y4 = transformer.transform(xmax, ymax)

    return (
        np.array([
            np.min([x1, x2], axis=0),
            np.min([y1, y3], axis=0),
            np.max([x3, x4], axis=0),
            np.max([y2, y4], axis=0),
        ])
        .round(5)
        .astype("float32")
        .T
    )
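# Hedged sketch of the corner-projection approximation used by geo_bounds(); the
# module's CRS and GEO_CRS constants are not shown here, so EPSG:5070 -> EPSG:4326
# stand in for them and the bounds arrays are fabricated.
def _corner_bounds_example():
    import numpy as np
    from pyproj import Transformer

    # per-geometry bounds in the projected CRS (two hypothetical geometries)
    xmin = np.array([-2.0e6, -1.5e6])
    ymin = np.array([2.4e6, 2.6e6])
    xmax = np.array([-1.9e6, -1.4e6])
    ymax = np.array([2.5e6, 2.7e6])
    transformer = Transformer.from_crs("EPSG:5070", "EPSG:4326", always_xy=True)
    # project two opposite corners for brevity; geo_bounds projects all four
    # corners and takes the min/max of each axis
    x1, y1 = transformer.transform(xmin, ymin)
    x2, y2 = transformer.transform(xmax, ymax)
    return np.array([np.minimum(x1, x2), np.minimum(y1, y2),
                     np.maximum(x1, x2), np.maximum(y1, y2)]).round(5).T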
def transform(bounds, **kwargs):
    """
    Project a bounding box, taking care to not slice off the sides.

    Args:
        bounds (tuple): bounding box to transform.
        transformer (pyproj.transformer.Transformer): A pyproj Transformer instance.
        in_crs (dict): Fiona-type proj4 mapping representing input projection.
        out_crs (dict): Fiona-type proj4 mapping representing output projection.

    Returns:
        ``tuple``
    """
    transformer = kwargs.get('transformer')
    in_crs = kwargs.get('in_crs')
    out_crs = kwargs.get('out_crs')

    if not transformer and not (in_crs and out_crs):
        raise errors.SvgisError('Need input CRS and output CRS or a Transformer')

    if transformer is None:
        transformer = Transformer.from_crs(in_crs, out_crs,
                                           skip_equivalent=True, always_xy=True)

    densebounds = ring(bounds)
    xbounds, ybounds = list(zip(*transformer.itransform(densebounds)))
    return min(xbounds), min(ybounds), max(xbounds), max(ybounds)
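# Hedged usage sketch for transform(): it accepts either a prebuilt Transformer or an
# in_crs/out_crs pair; the bounding box and EPSG codes below are illustrative only.
#
#   wgs84_box = (-74.1, 40.6, -73.7, 40.9)
#   projected = transform(wgs84_box, in_crs='EPSG:4326', out_crs='EPSG:32618')
#   # or, reusing a Transformer:
#   t = Transformer.from_crs('EPSG:4326', 'EPSG:32618', always_xy=True)
#   projected = transform(wgs84_box, transformer=t)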
def get_transformer(crs_from, crs_to):
    """Cache transformer objects"""
    key = (crs_from, crs_to)
    if key not in transformers:
        transformers[key] = Transformer.from_crs(crs_from, crs_to, always_xy=True)
    return transformers[key]
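# Hedged usage sketch for get_transformer(); it assumes the module-level `transformers`
# dict is defined.  Repeated calls with the same (crs_from, crs_to) key reuse a single
# Transformer instead of paying the Transformer.from_crs() setup cost every time.
#
#   t = get_transformer("EPSG:4326", "EPSG:3857")
#   x, y = t.transform(-73.99, 40.73)
#   assert get_transformer("EPSG:4326", "EPSG:3857") is t  # cache hit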
def get_m_coords(projection, x, y, z):
    if projection == "EPSG:3857":
        nw = num2deg(x, y, z)
        se = num2deg(x + 1, y + 1, z)
        transformer = Transformer.from_crs("EPSG:4326", projection, always_xy=True)
        x1, y1 = transformer.transform(nw[1], nw[0])
        x2, y2 = transformer.transform(se[1], se[0])
    elif projection == "EPSG:32661" or projection == "EPSG:3031":
        if projection == "EPSG:32661":
            boundinglat = 60.0
            lon_0 = 0
            llcrnr_lon = -45
            urcrnr_lon = 135
        elif projection == "EPSG:3031":
            boundinglat = -60.0
            lon_0 = 0
            llcrnr_lon = -135
            urcrnr_lon = 45

        proj = Proj(projection)
        xx, yy = proj(lon_0, boundinglat)
        lon, llcrnr_lat = proj(math.sqrt(2.0) * yy, 0.0, inverse=True)
        urcrnr_lat = llcrnr_lat

        urcrnrx, urcrnry = proj(urcrnr_lon, urcrnr_lat)
        llcrnrx, llcrnry = proj(llcrnr_lon, llcrnr_lat)

        n = 2 ** z
        x_tile = (urcrnrx - llcrnrx) / n
        y_tile = (urcrnry - llcrnry) / n
        dx = x_tile / 256
        dy = y_tile / 256

        x = llcrnrx + x * x_tile + dx * np.indices((256, 256), np.float32)[0, :, :]
        y = llcrnry + (n - y - 1) * y_tile + dy * np.indices((256, 256), np.float32)[1, :, :]
        x = x[:, ::-1]
        y = y[:, ::-1]
        return x, y

    x0 = np.linspace(x1, x2, 256)
    y0 = np.linspace(y1, y2, 256)
    return x0, y0
def to_crs(geometries, src_crs, target_crs):
    """Convert coordinates from one CRS to another CRS.

    Parameters
    ----------
    geometries : ndarray of pygeos geometries
    src_crs : CRS or params to create it
    target_crs : CRS or params to create it
    """
    if src_crs == target_crs:
        return geometries.copy()

    transformer = Transformer.from_crs(src_crs, target_crs, always_xy=True)
    coords = pg.get_coordinates(geometries)
    new_coords = transformer.transform(coords[:, 0], coords[:, 1])
    result = pg.set_coordinates(geometries.copy(), np.array(new_coords).T)
    return result
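# Hedged usage sketch for to_crs(); the pygeos points and EPSG codes are illustrative
# and not taken from the surrounding project.
def _to_crs_example():
    import numpy as np
    import pygeos as pg

    pts = pg.points(np.array([-104.99, -105.02]), np.array([39.74, 39.75]))
    utm = to_crs(pts, "EPSG:4326", "EPSG:32613")
    return pg.get_coordinates(utm)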
def check_coordinates(row):
    """Return errors for rows that are outside polygons."""
    gb_outline = gpd.read_file(GB_OUTLINE).loc[0, 'geometry']
    ni_outline = gpd.read_file(NI_OUTLINE).loc[0, 'geometry']
    to_irish_grid = Transformer.from_crs("EPSG:27700", "EPSG:29903", always_xy=True)

    # Check for points within gb_outline
    if row['geometry'].intersects(gb_outline):
        return None

    # Check if points outside gb_outline are in ni_outline
    ni_grid_geometry = Point(to_irish_grid.transform(row['geometry'].x, row['geometry'].y))
    if ni_grid_geometry.intersects(ni_outline):
        if row['LOCA_GREF']:
            return None
        else:
            return {'line': '-', 'group': 'LOCA',
                    'desc': f'NATE / NATN in Northern Ireland but LOCA_GREF undefined ({row.name})'}

    # Otherwise return error
    return {'line': '-', 'group': 'LOCA',
            'desc': f'NATE / NATN outside Great Britain and Northern Ireland ({row.name})'}
def addWKT(self, wkt, epsg):
    """Add WKT in static map.

    Parameters:
        wkt (string): The Well-Known-Text representation of geometry.
        epsg (int): The WKT CRS.

    Returns:
        (obj) The matplotlib plot.
    """
    import pygeos as pg
    from pyproj.transformer import Transformer

    geometry = pg.from_wkt(wkt)
    coords = pg.get_coordinates(geometry)
    try:
        transformer = Transformer.from_crs(epsg, 3857, always_xy=True)
        new_coords = transformer.transform(coords[:, 0], coords[:, 1])
        geometry = pg.set_coordinates(geometry, array(new_coords).T)
    except Exception:
        raise Exception('Transformation to EPSG:3857 failed.')

    with warnings.catch_warnings():
        warnings.filterwarnings("ignore")
        minx, miny, maxx, maxy = self._getBorders(new_coords, self.aspect_ratio)
        fig, ax = plt.subplots(figsize=(self._width, self._height), dpi=self.dpi)
        ax.set_xlim(minx, maxx)
        ax.set_ylim(miny, maxy)
        plt.xticks([], [])
        plt.yticks([], [])
        ax.fill(new_coords[0], new_coords[1], facecolor='#50505050',
                edgecolor='orange', linewidth=3)
        ctx.add_basemap(ax, source=self.basemap)
        self.map = fig
        plt.close(fig)

    return fig
def parse_points(csvfile: str, src_epsg: int, prj_epsg: int) -> Tuple[List[Point], int]:
    log.info(f'Opening {csvfile}.')
    csv_file = open(csvfile, 'r')
    iter_points = csv.reader(csv_file, delimiter=',', quotechar='"')
    next(iter_points)
    iter_points = counter(iter_points, 'Parsing point %s.')

    transformer = Transformer.from_crs(f'epsg:{src_epsg}', f'epsg:{prj_epsg}',
                                       always_xy=True, skip_equivalent=True)
    project = transformer.transform

    points = []
    peek = next(iter_points)
    iter_points.send(peek)
    secs = hhmm_to_secs(peek[2])

    for uuid, (lat, lon, _, mrt, pet, utci) in enumerate(iter_points):
        x, y = project(lon, lat)
        point = Point(uuid, x, y, float(mrt), float(pet), float(utci))
        points.append(point)

    csv_file.close()

    return points, secs
def export_links(database: SqliteUtil, filepath: str, src_epsg: int, prj_epsg: int):
    transformer = Transformer.from_crs(f'epsg:{src_epsg}', f'epsg:{prj_epsg}',
                                       always_xy=True, skip_equivalent=True)
    project = transformer.transform

    prjpath = os.path.splitext(filepath)[0] + '.prj'
    with open(prjpath, 'w') as prjfile:
        info = get_wkt_string(prj_epsg)
        prjfile.write(info)

    query = '''
        SELECT
            links.link_id,
            links.source_node,
            links.terminal_node,
            links.length,
            links.freespeed,
            links.capacity,
            links.permlanes,
            links.oneway,
            links.modes,
            links.air_temperature,
            links.mrt_temperature,
            nodes1.point,
            nodes2.point
        FROM links
        INNER JOIN nodes AS nodes1
            ON links.source_node = nodes1.node_id
        INNER JOIN nodes AS nodes2
            ON links.terminal_node = nodes2.node_id;
    '''
    database.cursor.execute(query)
    rows = database.fetch_rows()
    rows = counter(rows, 'Exporting link %s.')

    links = shapefile.Writer(filepath)
    links.field('link_id', 'C')
    links.field('source_node', 'C')
    links.field('terminal_node', 'C')
    links.field('length', 'N')
    links.field('freespeed', 'N')
    links.field('capacity', 'N')
    links.field('permlanes', 'N')
    links.field('oneway', 'N')
    links.field('modes', 'C')
    links.field('air_temperature', 'N')
    links.field('mrt_temperature', 'N')

    for row in rows:
        props = row[:-2]
        pt1, pt2 = row[-2:]
        x1, y1 = project(*xy(pt1))
        x2, y2 = project(*xy(pt2))
        try:
            links.record(*props)
        except Exception:
            print(props)
            breakpoint()
            exit()
        links.line([((x1, y1), (x2, y2))])

    if links.recNum != links.shpNum:
        log.error('Record/shape misalignment; shapefile exporting failure.')
        raise RuntimeError

    links.close()
def add_feature(self, feature, target_size, expansion_rate):
    expansion_rate = float(expansion_rate)
    target_size = float(target_size)
    assert target_size > 0.
    if not isinstance(feature, (LineString, MultiLineString)):
        raise TypeError(f"feature must be {LineString} or {MultiLineString}")
    if isinstance(feature, LineString):
        feature = MultiLineString([feature])

    tmpfile = tempfile.NamedTemporaryFile(prefix=str(tmpdir) + '/')
    meta = self._src._src.meta.copy()

    if self._src.crs.is_geographic:
        with rasterio.open(tmpfile.name, 'w', **meta) as dst:
            for window in self._src.iter_windows():
                xy = self._src.get_xy(window)
                _tx, _ty, zone, _ = utm.from_latlon(xy[:, 1], xy[:, 0])
                dst_crs = CRS(proj='utm', zone=zone, ellps='WGS84')
                transformer = Transformer.from_crs(self._src.crs, dst_crs, always_xy=True)
                res = []
                for linestring in feature:
                    distances = [0]
                    linestring = transform(transformer.transform, linestring)
                    while distances[-1] + target_size < linestring.length:
                        distances.append(distances[-1] + target_size)
                    distances.append(linestring.length)
                    linestring = LineString([
                        linestring.interpolate(distance)
                        for distance in distances
                    ])
                    res.extend(linestring.coords)
                tree = cKDTree(np.vstack(res))
                values = tree.query(np.vstack([_tx, _ty]).T, n_jobs=self._nprocs)[0]
                values = expansion_rate * target_size * values + target_size
                values = values.reshape(
                    (1, window.height, window.width)).astype(meta['dtype'])
                if self._hmin is not None:
                    values[np.where(values < self._hmin)] = self._hmin
                if self._hmax is not None:
                    values[np.where(values > self._hmax)] = self._hmax
                values = np.minimum(self._src.get_values(window=window), values)
                dst.write(values, window=window)
    else:  # is not geographic
        xy = self._src.get_xy(window)
        _tx, _ty, zone, _ = utm.from_latlon(xy[:, 1], xy[:, 0])
        dst_crs = CRS(proj='utm', zone=zone, ellps='WGS84')
        transformer = Transformer.from_crs(self._src.crs, dst_crs, always_xy=True)
        res = []
        for linestring in feature:
            distances = [0]
            linestring = transform(transformer.transform, linestring)
            while distances[-1] + target_size < linestring.length:
                distances.append(distances[-1] + target_size)
            distances.append(linestring.length)
            linestring = LineString([
                linestring.interpolate(distance)
                for distance in distances
            ])
            res.extend(linestring.coords)
        tree = cKDTree(np.vstack(res))
        with rasterio.open(tmpfile.name, 'w', **meta) as dst:
            for i, window in enumerate(self._src.iter_windows()):
                values = tree.query(self._src.get_xy(window), n_jobs=self._nprocs)[0]
                values = expansion_rate * target_size * values + target_size
                dst.write(
                    np.minimum(
                        self._src.get_values(window=window),
                        values.reshape((1, window.height, window.width))
                    ).astype(meta['dtype']),
                    window=window)

    self._tmpfile = tmpfile
def __init__(self, src, dst):
    """
    Transformation from src to dst
    """
    self.pre_pipeline = None
    self.epsg_pipeline = None
    self.post_pipeline = None

    src = src.upper()
    dst = dst.upper()
    dst_hub = dst

    if src not in CRS_LIST.keys():
        raise ValueError(f"Unknown source CRS identifier: '{src}'")

    if dst not in CRS_LIST.keys():
        raise ValueError(f"Unknown destination CRS identifier: '{dst}'")

    src_region = CRS_LIST[src]["country"]
    dst_region = CRS_LIST[dst]["country"]
    if src_region != dst_region and "Global" not in (src_region, dst_region):
        raise ValueError("CRS's are not compatible across countries")

    # determine region of transformation
    if src_region == dst_region:
        region = AOI[src_region]
    elif src_region == "Global":
        region = AOI[dst_region]
    else:
        region = AOI[src_region]

    src_auth = src.split(":")[0]
    dst_auth = dst.split(":")[0]

    # determine which transformation stops to do along the way
    non_epsg_src = src_auth != "EPSG"
    non_epsg_dst = dst_auth != "EPSG"

    if non_epsg_src:
        pipeline = (
            f"+proj=pipeline "
            f"+step +inv +init={src} "
            f"+step +proj=unitconvert +xy_in=rad +xy_out=deg "
            f"+step +proj=axisswap +order=2,1"
        )
        self.pre_pipeline = Transformer.from_pipeline(pipeline)

        if src_auth == "DK":
            src = "EPSG:4258"

    # standard case, which handles all transformations between
    # CRS's that are both EPSG SRID's AND which handles transformations
    # where ONE of the two CRS's is a non-EPSG SRID by supplying a
    # transformation hub using ETRS89 or GR96
    if src != dst or non_epsg_src != non_epsg_dst:
        if dst_auth == "DK":
            dst_hub = "EPSG:4258"
        if dst_auth == "GL":
            dst_hub = "EPSG:4909"

        try:
            self.epsg_pipeline = Transformer.from_crs(src, dst_hub, area_of_interest=region)
        except RuntimeError as e:
            raise ValueError("Invalid CRS identifier") from e

    if non_epsg_dst:
        pipeline = (
            f"+proj=pipeline "
            f"+step +proj=axisswap +order=2,1 "
            f"+step +proj=unitconvert +xy_in=deg +xy_out=rad "
            f"+step +init={dst}"
        )
        self.post_pipeline = Transformer.from_pipeline(pipeline)
def parse_parcels(database: SqliteUtil, residence_file: str, commerce_file: str,
                  parcel_file: str, cooling_file: str, src_epsg: int, prj_epsg: int):
    boundaries = {}
    cooling = {}
    parcels = []
    apns = set()

    transformer = Transformer.from_crs(f'epsg:{src_epsg}', f'epsg:{prj_epsg}',
                                       always_xy=True, skip_equivalent=True)
    project = transformer.transform

    log.info('Allocating tables for parcels.')
    create_tables(database)

    log.info('Parsing parcel boundaries from shapefile.')
    parser = shapefile.Reader(parcel_file)
    iter_boundaries = counter(iter(parser), 'Parsing parcel boundary %s.')
    for parcel in iter_boundaries:
        if len(parcel.shape.points):
            apn = parcel.record['APN']
            points = (project(*pt) for pt in parcel.shape.points)
            polygon = Polygon(points)
            boundaries[apn] = polygon
    parser.close()

    log.info('Loading cooling information from csv file.')
    with open(cooling_file, 'r') as open_file:
        lines = csv.reader(open_file, delimiter=',', quotechar='"')
        next(lines)
        for desc, _, cool in lines:
            cooling[desc] = bool(cool)

    log.info('Parsing residential parcels from database file.')
    parser = shapefile.Reader(residence_file)
    iter_parcels = counter(parser.iterRecords(), 'Parsing residential parcel %s.')
    for record in iter_parcels:
        apn = record['APN']
        if apn in boundaries and apn not in apns:
            cool = True
            polygon = boundaries[apn]
            parcel = Parcel(apn, 'residential', cool, polygon)
            parcels.append(parcel)
            apns.add(apn)
    parser.close()

    log.info('Parsing commercial parcels from database file.')
    parser = shapefile.Reader(commerce_file)
    iter_parcels = counter(parser.iterRecords(), 'Parsing commercial parcel %s.')
    for record in iter_parcels:
        apn = record['APN']
        if apn in boundaries and apn not in apns:
            desc = record['DESCRIPT']
            cool = cooling[desc]
            polygon = boundaries[apn]
            parcel = Parcel(apn, 'commercial', cool, polygon)
            parcels.append(parcel)
            apns.add(apn)
    parser.close()

    log.info('Parsing extraneous parcels from shapefile.')
    other = set(boundaries.keys()) - apns
    other = counter(other, 'Parsing extraneous parcel %s.')
    for apn in other:
        polygon = boundaries[apn]
        parcel = Parcel(apn, 'other', True, polygon)
        parcels.append(parcel)

    def load():
        for idx, parcel in enumerate(parcels):
            pt = parcel.polygon.centroid
            yield (idx, (pt.x, pt.y, pt.x, pt.y), None)

    log.info('Building spatial index from parcel data.')
    index = Index(load())

    log.info('Loading network region data.')
    regions = load_regions(database)

    log.info('Scanning regions and mapping mazs to parcels.')
    iter_regions = counter(regions, 'Scanning region %s.')
    for region in iter_regions:
        apn = f'maz-{region.maz}'
        parcel = Parcel(apn, 'default', True, region.polygon)
        parcel.maz = region.maz
        parcels.append(parcel)
        result = index.intersection(region.polygon.bounds)
        for idx in result:
            parcel = parcels[idx]
            if region.polygon.contains(parcel.polygon.centroid):
                if parcel.maz is not None:
                    warning = 'Parcel %s is in both region %s and %s' \
                        '; the latter region will be kept.'
                    log.warning(warning % (parcel.apn, parcel.maz, region.maz))
                parcel.maz = region.maz
    del regions

    def dump():
        for parcel in parcels:
            yield (
                parcel.apn,
                parcel.maz,
                parcel.kind,
                int(parcel.cooling),
                None,
                None,
                dumps(parcel.polygon.centroid),
                dumps(parcel.polygon)
            )

    log.info('Writing parsed parcels to database.')
    database.insert_values('parcels', dump(), 8)
    database.connection.commit()

    log.info('Creating indexes on new tables.')
    create_indexes(database)
def _transform_to_meters(self, coordinate):
    transformer = Transformer.from_crs(CRS("EPSG:4326"), self.file.crs)
    x, y = transformer.transform(coordinate.latitude, coordinate.longitude)
    if self.file.crs != CRS("EPSG:4326"):
        return y, x
    return x, y
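# Note on axis order: the Transformer above is created without always_xy=True, so
# EPSG:4326 keeps its authority axis order (latitude, longitude), which is why
# coordinate.latitude is passed first.  A hedged equivalent using explicit x/y order
# (EPSG:32633 is only an example target CRS):
#
#   t = Transformer.from_crs("EPSG:4326", "EPSG:32633", always_xy=True)
#   x, y = t.transform(coordinate.longitude, coordinate.latitude)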
def _make_crs_transform(from_crs, to_crs, always_xy):
    return Transformer.from_crs(from_crs, to_crs, always_xy=always_xy).transform
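# Hedged usage sketch: the bound .transform method returned above can be called on
# scalars or sequences, or handed to shapely.ops.transform(); EPSG codes are examples.
#
#   project = _make_crs_transform("EPSG:4326", "EPSG:3857", always_xy=True)
#   x, y = project([-73.99, -74.00], [40.73, 40.74])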
def parse_roads(database: SqliteUtil, networkpath: str, src_epsg: int, prj_epsg: int):
    log.info('Allocating tables for network links and nodes.')
    create_tables(database)

    log.info('Loading network roads file.')
    network = multiopen(networkpath, mode='rb')
    parser = iter(iterparse(network, events=('start', 'end')))
    evt, root = next(parser)

    transformer = Transformer.from_crs(f'epsg:{src_epsg}', f'epsg:{prj_epsg}',
                                       always_xy=True, skip_equivalent=True)
    project = transformer.transform

    links = []
    nodes = []
    count, n = 0, 1

    for evt, elem in parser:
        if evt == 'start':
            if elem.tag == 'nodes':
                log.info('Parsing nodes from network file.')
                count, n = 0, 1
                root.clear()
            elif elem.tag == 'links':
                if count != n << 1:
                    log.info(f'Parsing node {count}.')
                log.info('Parsing links from network file.')
                count, n = 0, 1
                root.clear()
        elif evt == 'end':
            if elem.tag == 'node':
                node_id = str(elem.get('id'))
                x = float(elem.get('x'))
                y = float(elem.get('y'))
                x, y = project(x, y)
                wkt = f'POINT ({x} {y})'
                nodes.append((node_id, None, wkt))
                count += 1
                if count == n:
                    log.info(f'Parsing node {count}.')
                    n <<= 1
                if count % 100000 == 0:
                    root.clear()
            elif elem.tag == 'link':
                source_node = str(elem.get('from'))
                terminal_node = str(elem.get('to'))
                links.append((
                    str(elem.get('id')),
                    source_node,
                    terminal_node,
                    float(elem.get('length')),
                    float(elem.get('freespeed')),
                    float(elem.get('capacity')),
                    float(elem.get('permlanes')),
                    int(elem.get('oneway')),
                    str(elem.get('modes')),
                    None,
                    None
                ))
                count += 1
                if count == n:
                    log.info(f'Parsing link {count}.')
                    n <<= 1
                if count % 100000 == 0:
                    root.clear()

    if count != n << 1:
        log.info(f'Parsing link {count}.')

    network.close()

    log.info('Writing parsed links and nodes to database.')
    database.insert_values('nodes', nodes, 3)
    database.insert_values('links', links, 11)
    database.connection.commit()

    log.info('Creating indexes on new tables.')
    create_indexes(database)
def los_calc(
        vp: MultiPointParams,
        input_filename: Union[gdal.Dataset, PathLikeOrStr, DataSetSelector],
        del_s: float,
        in_coords_srs=None,
        out_crs=None,
        bi=1,
        ovr_idx=0,
        threads=0,
        of=None,
        backend: ViewshedBackend = None,
        output_filename=None,
        operation: Optional[CalcOperation] = None,
        color_palette: Optional[ColorPaletteOrPathOrStrings] = None,
        ext_url: Optional[str] = None,
        mock=False):
    input_selector = None
    input_ds = None
    if isinstance(input_filename, PathLikeOrStr.__args__):
        input_ds = gdalos_util.open_ds(input_filename, ovr_idx=ovr_idx)
    elif isinstance(input_filename, DataSetSelector):
        input_selector = input_filename
    else:
        input_ds = input_filename

    srs_4326 = projdef.get_srs(4326)
    pjstr_4326 = srs_4326.ExportToProj4()
    if in_coords_srs is not None:
        in_coords_srs = projdef.get_proj_string(in_coords_srs)

    # figure out in_coords_crs_pj for getting the geo_ox, geo_oy
    if input_selector is None:
        if in_coords_srs is None:
            if input_ds is None:
                input_ds = gdalos_util.open_ds(input_filename, ovr_idx=ovr_idx)
            in_coords_srs = projdef.get_srs_from_ds(input_ds)
    else:
        if in_coords_srs is None:
            in_coords_srs = pjstr_4326

    if isinstance(vp, dict):
        vp = MultiPointParams.get_object_from_lists_dict(vp)
    transform_coords_to_4326 = projdef.get_transform(in_coords_srs, pjstr_4326)
    is_fwd = vp.is_fwd()
    vp.fix_scalars_and_vectors()
    o_points = vp.oxy
    t_points = None if is_fwd else vp.txy
    obs_tar_shape = (len(o_points), 0 if not t_points else len(t_points))
    geo_t = None
    if transform_coords_to_4326:
        # todo: use TransformPoints
        geo_o = transform_coords_to_4326.TransformPoints(o_points)
        if not is_fwd:
            geo_t = transform_coords_to_4326.TransformPoints(t_points)
    else:
        geo_o = o_points
        if not is_fwd:
            geo_t = t_points

    # select the ds
    if input_selector is not None:
        min_x = min_y = math.inf
        max_x = max_y = -math.inf
        for x, y in geo_o:
            if x < min_x:
                min_x = x
            if x > max_x:
                max_x = x
            if y < min_y:
                min_y = y
            if y > max_y:
                max_y = y
        if not is_fwd:
            for x, y in geo_t:
                if x < min_x:
                    min_x = x
                if x > max_x:
                    max_x = x
                if y < min_y:
                    min_y = y
                if y > max_y:
                    max_y = y
        input_filename, input_ds = input_selector.get_item_projected(
            (min_x + max_x) / 2, (min_y + max_y) / 2)
        input_filename = Path(input_filename).resolve()
    if input_ds is None and not mock:
        input_ds = gdalos_util.open_ds(input_filename, ovr_idx=ovr_idx)
        if input_ds is None:
            raise Exception(f'cannot open input file: {input_filename}')

    # figure out the input, output and intermediate srs
    # the intermediate srs will be used for combining the output rasters, if needed
    if input_ds is not None:
        pjstr_input_srs = projdef.get_srs_pj(input_ds)
        pjstr_output_srs = projdef.get_proj_string(out_crs) if out_crs is not None else \
            pjstr_input_srs if input_selector is None else pjstr_4326
        if input_selector is None:
            pjstr_inter_srs = pjstr_input_srs
        else:
            pjstr_inter_srs = pjstr_output_srs

    input_srs = projdef.get_srs_from_ds(input_ds)
    input_raster_is_projected = input_srs.IsProjected()

    is_radio = vp.radio_parameters is not None
    if isinstance(backend, str):
        backend = ViewshedBackend[backend]
    if backend == ViewshedBackend.radio and not is_radio:
        raise Exception('No radio parameters were provided')
    if backend is None or backend == ViewshedBackend.radio:
        backend = default_LOSBackend if (not is_radio or input_raster_is_projected) else \
            ViewshedBackend.z_rest if ext_url else default_RFBackend

    if vp.calc_mode is None:
        raise Exception('calc_mode is None')
    elif not isinstance(vp.calc_mode, (tuple, list)):
        vp.calc_mode = [vp.calc_mode]
    vp.calc_mode = [RadioCalcType[x] if isinstance(x, str) else x for x in vp.calc_mode]

    backend_requires_projected_ds = backend.requires_projected_ds()
    if input_raster_is_projected:
        transform_coords_to_raster = projdef.get_transform(in_coords_srs, pjstr_input_srs)
        projected_filename = input_filename
        if transform_coords_to_raster:
            if is_fwd:
                vp.ox = np.array(vp.ox, dtype=np.float32)
                vp.oy = np.array(vp.oy, dtype=np.float32)
                input_srs = projdef.get_srs(pjstr_input_srs)
                zone_lon0 = input_srs.GetProjParm('central_meridian')
                vp.convergence = utm_convergence(vp.ox, vp.oy, zone_lon0)
            else:
                t_points = transform_coords_to_raster.TransformPoints(t_points)
            o_points = transform_coords_to_raster.TransformPoints(o_points)
    elif backend_requires_projected_ds:
        raise Exception(f'input raster has to be projected')
    else:
        pass
        # projected_pj = get_projected_pj(geo_o[0][0], geo_o[0][1])
        # transform_coords_to_raster = projdef.get_transform(in_coords_srs, projected_pj)
        # vp.ox, vp.oy, _ = transform_coords_to_raster.TransformPoints(vp.ox, vp.oy)
        # d = gdalos_extent.transform_resolution_p(transform_coords_to_raster, 10, 10, vp.ox, vp.oy)
        # extent = GeoRectangle.from_center_and_radius(vp.ox, vp.oy, vp.max_r + d, vp.max_r + d)
        #
        # projected_filename = tempfile.mktemp('.tif')
        # projected_ds = gdalos_trans(
        #     input_ds, out_filename=projected_filename, warp_srs=projected_pj,
        #     extent=extent, return_ds=True, write_info=False, write_spec=False)
        # if not projected_ds:
        #     raise Exception('input raster projection failed')
        #
        # input_ds = projected_ds

    if not is_fwd:
        o_points, t_points = gdalos_base.make_pairs(o_points, t_points, vp.ot_fill)
    vp.oxy = list(o_points)
    if not is_fwd:
        vp.txy = list(t_points)

    output_names = [x.name for x in vp.calc_mode]
    res = collections.OrderedDict()
    res['backend'] = backend.name
    input_names = ['ox', 'oy', 'oz', 'tx', 'ty', 'tz']

    if backend == ViewshedBackend.rfmodel:
        from rfmodel.rfmodel import calc_path_loss_lonlat_multi
        from tirem.tirem3 import calc_tirem_loss
        inputs = vp.get_as_rfmodel_params(del_s=del_s)
        float_res, bool_res = calc_path_loss_lonlat_multi(calc_tirem_loss, input_ds, **inputs)
        for name in input_names:
            res[name] = getattr(vp, name)
        mode_map = dict(PathLoss=1, FreeSpaceLoss=2)
        for idx, name in enumerate(output_names):
            if name in mode_map:
                res[name] = float_res[mode_map[name]]
            else:
                res[name] = bool_res
    elif backend == ViewshedBackend.z_rest:
        del_s = del_s or get_resolution_meters(input_ds)
        ox, oy, oz = vp.ox, vp.oy, vp.oz
        frequency = vp.radio_parameters.frequency
        polarization = vp.radio_parameters.get_polarization_deg()
        if not isinstance(ox, Sequence):
            ox = [ox]
        if not isinstance(oy, Sequence):
            oy = [oy]
        if not isinstance(oz, Sequence):
            oz = [oz]
        if not isinstance(frequency, Sequence):
            frequency = [frequency]
        if not isinstance(polarization, Sequence):
            polarization = [polarization]
        slices = get_calc_slices(ox, oy, oz)
        k_factor = vp.get_k_factor()
        # res_tx = []
        # res_ty = []
        # res_tz = []
        res_loss = []
        res_los = []
        res_freeloss = []
        for slice in slices:
            data = {
                "kFactor": k_factor,
                "samplingInterval": del_s,
                "originPointWKTGeoWGS84": f"POINT({ox[slice.start]}, {oy[slice.start]})",
                "isfeet1": False,
                "fernelOrder": 0,
                "originAntHeight": oz[slice.start],
                "destPointsRows": [
                    {
                        "destPointWKTGeoWGS84": f"POINT({tx}, {ty})",
                        "destAntHeight": tz,
                        "isfeet": False,
                        "frequencyMhz": f,
                        "polarizationDeg": p,
                        "rowId": idx + 1
                    }
                    for idx, (tx, ty, tz, f, p) in enumerate(zip(
                        vp.tx[slice], vp.ty[slice], vp.tz[slice],
                        cycle(frequency[slice]), cycle(polarization[slice])))
                ]
            }
            data.update(vp.radio_parameters.as_radiobase_params())
            try:
                response = requests.post(ext_url, json=data)
                res = response.json()
            except Exception as e:
                raise Exception(f'Could not connect to {ext_url} ({e})')
            res = res['operationResult']['pathLossTable']
            res = list_of_dict_to_dict_of_lists(res)
            # res_tx.extend(res['x'])
            # res_ty.extend(res['y'])
            # res_tz.extend(res['height'])
            res_loss.extend(res['medianLoss'])
            res_los.extend(res['isRFLOS'])
            # note this distance is 2d, not taking into account the altitude difference
            dist = calc_dist(ox[slice], oy[slice], vp.tx[slice], vp.ty[slice])
            freeloss = calc_free_space_loss(dist, frequency[slice]).tolist()
            res_freeloss.extend(freeloss)
        res = {
            # 'tx': res_tx, 'ty': res_ty, 'tz': res_tz,
            'PathLoss': res_loss,
            'FreeSpaceLoss': res_freeloss,
            'LOSVisRes': res_los,
        }
    elif backend == ViewshedBackend.talos:
        ovr_idx = get_ovr_idx(projected_filename, ovr_idx)
        if vp.is_fwd():
            vp.calc_fwd(projected_filename, ovr_idx)
        inputs = vp.get_as_talos_params()
        if not mock:
            if not projected_filename:
                raise Exception('to use talos backend you need to provide an input filename')
            from talosgis import talos
            talos_module_init()
            dtm_open_err = talos.GS_DtmOpenDTM(str(projected_filename))
            if dtm_open_err != 0:
                raise Exception(f'talos could not open input file {projected_filename}')
            talos.GS_SetProjectCRSFromActiveDTM()
            talos.GS_DtmSelectOvle(ovr_idx)
            talos.GS_DtmSetCalcThreadsCount(threads or 0)
            talos.GS_SetRefractionCoeff(vp.refraction_coeff)
            if hasattr(talos, 'GS_SetCalcModule'):
                talos.GS_SetCalcModule(vp.get_calc_module())
            radio_params = vp.get_radio_as_talos_params()
            if radio_params:
                talos_radio_init()
                # multi_radio_params = dict_of_reduce_if_same(radio_params)
                # if multi_radio_params:
                #     # raise Exception('unsupported multiple radio parameters')
                #     talos.GS_SetRadioParameters(**radio_params)
                #     result = talos.GS_Radio_Calc(**inputs)
                # else:
                dict_of_selected_items(radio_params, index=0)
                talos.GS_SetRadioParameters(**radio_params)
            result = talos.GS_Radio_Calc(**inputs)
            if result:
                raise Exception('talos calc error')
            float_res = inputs['AIO_re']
            float_res = [float_res[i] for i in range(len(float_res))]
            for name in input_names:
                res[name] = inputs[f'AIO_{name}']
            for idx, name in enumerate(output_names):
                res[name] = float_res[idx]

        if not is_fwd:
            geo_o, geo_t = gdalos_base.make_pairs(geo_o, geo_t, vp.ot_fill)
        vp.oxy = list(geo_o)
        res['ox'] = vp.ox
        res['oy'] = vp.oy
        if not is_fwd:
            vp.txy = list(geo_t)
            res['tx'] = vp.tx
            res['ty'] = vp.ty

        # transform block point from projected to 4326
        transformer = Transformer.from_crs(in_coords_srs, pjstr_input_srs, always_xy=True)
        if any(b in output_names for b in ['bx', 'by']):
            res['bx'], res['by'] = transformer.transform(
                xx=res['bx'], yy=res['by'], direction=TransformDirection.INVERSE)

        if 'LOSVisRes' in output_names and operation:
            res = res['LOSVisRes']
            los = res.reshape(obs_tar_shape)
            res = los_operation(los, operation=operation)
            if color_palette is not None:
                alts = vp.tz  # todo: altitudes need to be absolute (above sea)
                res = poly_to_czml(res, geo_t, alts, color_palette)
    else:
        raise Exception('unknown or unsupported backend {}'.format(backend))

    if res is None:
        raise Exception('error occurred')
    elif output_filename is not None:
        os.makedirs(os.path.dirname(str(output_filename)), exist_ok=True)
        output_filename = Path(output_filename)
        if of != 'xyz':
            res['r'] = [input_filename]
            with open(output_filename, 'w') as outfile:
                json_dump = {k: v.tolist() if isinstance(v, np.ndarray) else str(v)
                             for k, v in res.items()}
                json.dump(json_dump, outfile, indent=2)
        else:
            xyz = np.stack(res.values()).transpose()
            np.savetxt(output_filename, xyz, fmt='%s')

    return res
def process_gps_records(log_files):
    result: List[str] = []
    if platform == "linux":
        log_folder = home_folder + "/logs/statistics/MCP"
    elif platform == "win32":
        log_folder = r"Z:\logs\statistics\MCP"

    p_path = Path(log_folder)
    if not p_path.exists():
        p_path.mkdir(parents=True)

    for f in log_files:
        log_p = Path(f)
        year = log_p.stem.split("_")[0]
        month = log_p.stem.split("_")[1]
        day = log_p.stem.split("_")[2]
        file_name = "record_mcp_" + year + "_" + month + "_" + day + ".csv"
        file_p = p_path / file_name
        if file_p.exists():
            file_p.unlink()

        with open(file_p, "a+") as target_f:
            header = ("Device_id, total # of points, # of convex points, "
                      "convex area, list of convex points\n")
            target_f.write(header)

        gps_records = tools.gps_tools.read_Json(f)
        position_num = 0
        for device_id in device_list:
            points = get_points_list_by_device_id(gps_records, device_id)
            position_num += len(points)
            p_list = []
            for p in points:
                p_list.append([p.x, p.y])
            points_array = np.array(p_list)

            h_list = []
            convex_area = 0
            if len(points_array) != 0:
                u = np.unique(points_array, axis=0)  # list of the input nodes
                print(device_id, len(u))
                if u.shape[0] >= 3:
                    try:
                        hull = ConvexHull(u)
                    except scipy.spatial.qhull.QhullError as e:
                        print("Exception : =============================================")
                        print("log file name : {}".format(f))
                        print("device id : {}".format(device_id))
                        print("unique points list : {}".format(u))
                        print("exception message information : \n{} ".format(e))
                        print("=========================================================")
                        continue

                    # get convex hull points list
                    hx = []
                    hy = []
                    # hull.vertices stores the index of the corner of the hull
                    for v in hull.vertices:
                        hx.append(u[v][0])
                        hy.append(u[v][1])
                        h_list.append([u[v][0], u[v][1]])

                    # project
                    lon, lat = zip(*[u[hull.vertices, :]][0])
                    trs = Transformer.from_crs(
                        '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs',
                        '+proj=utm +zone=+13k')
                    x, y = trs.transform(lon, lat)

                    # calculate the area
                    ob = list(zip(x, y))
                    convex_area = Polygon(ob).area  # square meters

            with open(file_p, "a+") as target_f:
                s = ""
                for h in h_list:
                    s += "[{};{}]|".format(h[0], h[1])
                target_f.write("{},{},{},{},{}\n".format(
                    device_id, len(points), len(h_list), convex_area, s))

        log_str = "In the file {}, there are {} records found including {} POSITION messages.".format(
            f, len(gps_records), position_num)
        print(log_str)
        result.append(log_str)

        dt_string = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
        str_re = "{} : ================================================================".format(dt_string)
        print(str_re)
        result.append(str_re)

    return result