def test_to_json_dict():
    """The JSON dict of a 4326->3857 transform describes a Conversion."""
    json_dict = Transformer.from_crs(4326, 3857).to_json_dict()
    assert json_dict["type"] == "Conversion"
def test_always_xy__transformer():
    """NZTM -> WGS84 with axis order forced to x, y gives (lon, lat)."""
    nztm_to_wgs84 = Transformer.from_crs(2193, 4326, always_xy=True)
    result = nztm_to_wgs84.transform(1625350, 5504853)
    assert_almost_equal(result, (173.29964730317386, -40.60674802693758))
def test_to_wkt():
    """WKT output of a 4326->3857 transform starts with the conversion node."""
    wkt = Transformer.from_crs(4326, 3857).to_wkt()
    expected_prefix = 'CONVERSION["Popular Visualisation Pseudo-Mercator"'
    assert wkt.startswith(expected_prefix)
def test_equivalent_crs():
    """Identical source/target CRS are detected when skipping is requested."""
    tr = Transformer.from_crs("epsg:4326", 4326, skip_equivalent=True)
    internals = tr._transformer
    assert internals.projections_equivalent
    assert internals.projections_exact_same
    assert internals.skip_equivalent
def test_equivalent_crs__different():
    """Distinct CRS are never flagged equivalent, even with skip_equivalent."""
    tr = Transformer.from_crs("epsg:4326", 3857, skip_equivalent=True)
    internals = tr._transformer
    assert internals.skip_equivalent
    assert not internals.projections_equivalent
    assert not internals.projections_exact_same
def test_to_json__pretty__indenation():
    """Pretty JSON output honors a custom indentation width."""
    tr = Transformer.from_crs(4326, 3857)
    pretty_json = tr.to_json(pretty=True, indentation=4)
    assert "Conversion" in pretty_json
    assert pretty_json.startswith('{\n "')
def test_transformer__area_of_interest__invalid():
    """A plain tuple is rejected where an AreaOfInterest is expected."""
    bad_area = (-136.46, 49.0, -60.72, 83.17)
    with pytest.raises(ProjError):
        Transformer.from_crs(4326, 2964, area_of_interest=bad_area)
def __init__(
        self,
        in_list: CanCreateMultipleGeom,
        base_mesh: Optional[Mesh] = None,
        zmin: Optional[float] = None,
        zmax: Optional[float] = None,
        nprocs: Optional[int] = None,
        chunk_size: Optional[int] = None,
        overlap: Optional[int] = None,
        verbosity: int = 0,
        base_shape: Optional[Union[Polygon, MultiPolygon]] = None,
        base_shape_crs: Union[str, CRS] = 'EPSG:4326'
) -> None:
    """Initialize geometry collector object

    Parameters
    ----------
    in_list : list
        List of objects from which a single geometry object can be
        created. This includes paths to raster or mesh files as
        strings, as well as Raster, Mesh or Polygon objects. Note
        that objects are not copied and currently any clipping that
        happens during processing will affect the objects passed to
        the `GeomCollector` constructor.
    base_mesh : Mesh or None, default=None
        Base mesh to be used for extracting boundaries of the domain.
        If not `None` all the input rasters are clipped by the
        `base_mesh` polygon before further processing. This is useful
        for cases where we'd like to locally refine features of the
        domain (domain region >> inputs region) or when input rasters
        are much larger than the domain and we'd like to extract
        contours only within the domain to save on computation.
    zmin : float or None, default=None
        Minimum elevation for extracting domain.
    zmax : float or None, default=None
        Maximum elevation for extracting domain.
    nprocs : int or None, default=None
        Number of processors to use in parallel parts of the
        collector computation.
    chunk_size : int or None, default=None
        Chunk size for windowed calculation on rasters.
    overlap : int or None, default=None
        Window overlap for windowed calculation on rasters.
    verbosity : int, default=0
        Verbosity of the output (currently stored nowhere and unused
        in this constructor).
    base_shape : Polygon or MultiPolygon or None, default=None
        Similar to `base_mesh`, but instead of calculating the polygon
        from a mesh, directly receive it from the calling code.
    base_shape_crs : str or CRS, default='EPSG:4326'
        CRS of the input `base_shape`.
    """

    # TODO: Like hfun collector and ops, later move the geom combine
    # functionality here and just call it from ops instead of the
    # other way around.
    # For shapely and potentially mesh geom there's no priority
    # definition, they are just unioned with whatever the rest of the
    # input results in.
    # NOTE: Input Hfuns and their Rasters can get modified.

    # Resolve nprocs: both None and -1 mean "use all available cores".
    nprocs = -1 if nprocs is None else nprocs
    nprocs = cpu_count() if nprocs == -1 else nprocs

    self._elev_info = dict(zmin=zmin, zmax=zmax)
    self._nprocs = nprocs
    self._chunk_size = chunk_size
    self._overlap = overlap
    self._geom_list = []
    self._base_shape = base_shape
    self._base_shape_crs = CRS.from_user_input(base_shape_crs)
    # NOTE: Base mesh has to have a crs otherwise MeshGeom throws
    # exception
    self._base_mesh = base_mesh
    self._contour_patch_info_coll = ContourPatchInfoCollector()

    self._type_chk(in_list)

    # TODO: CRS considerations -- geom combine doesn't necessarily
    # return EPSG:4326 (unlike hfun collector msh_t)
    self._crs = 'EPSG:4326'

    def _clip_to_base(raster):
        """Clip `raster` in place by the base shape (reprojected into the
        raster CRS if needed) or by the base mesh bbox. Returns False
        when the raster does not intersect the clip region so the
        caller can skip it."""
        if self._base_shape:
            clip_shape = self._base_shape
            if not self._base_shape_crs.equals(raster.crs):
                transformer = Transformer.from_crs(
                    self._base_shape_crs, raster.crs, always_xy=True)
                clip_shape = ops.transform(
                    transformer.transform, clip_shape)
            try:
                raster.clip(clip_shape)
            except ValueError as err:
                # This raster does not intersect shape
                _logger.debug(err)
                return False
        elif self._base_mesh:
            try:
                raster.clip(self._base_mesh.get_bbox(crs=raster.crs))
            except ValueError as err:
                # This raster does not intersect shape
                _logger.debug(err)
                return False
        return True

    for in_item in in_list:
        # TODO: Add supports(ext) to each geom type?
        if isinstance(in_item, BaseGeom):
            geom = in_item

        elif isinstance(in_item, Raster):
            if not _clip_to_base(in_item):
                continue
            geom = RasterGeom(in_item, **self._elev_info)

        elif isinstance(in_item, BaseMesh):
            geom = MeshGeom(in_item)

        elif isinstance(in_item, str):
            if in_item.endswith('.tif'):
                raster = Raster(in_item)
                # BUGFIX: clip the freshly opened Raster object; the
                # original called `.clip()` on the path *string*
                # (`in_item.clip(...)`), which raised AttributeError.
                if not _clip_to_base(raster):
                    continue
                geom = RasterGeom(raster, **self._elev_info)

            elif in_item.endswith(
                    ('.14', '.grd', '.gr3', '.msh', '.2dm')):
                geom = MeshGeom(Mesh.open(in_item))

            else:
                raise TypeError("Input file extension not supported!")

        else:
            # Robustness: previously an unsupported input type fell
            # through and raised NameError on the append below.
            raise TypeError(
                f"Invalid input type {type(in_item)} for geometry"
                " collector!")

        self._geom_list.append(geom)
DURATIONS = [ '60m', '2h', '3h', '6h', '12h', '24h', '2d', '3d', '4d', '7d', '10d', '20d', '30d', '45d', '60d' ] DATASETS = ['GFDL-CM3', 'NCAR-CCSM4'] TIMESLICES = [('2020', '2049'), ('2050', '2079'), ('2080', '2099')] VARIABLES = ['pf_upper', 'pf', 'pf_lower'] INTERVALS = [2.0, 5.0, 10.0, 25.0, 50.0, 100.0, 200.0, 500.0, 1000.0] if len(sys.argv) != 3: print("usage: point_data.py LONGITUDE LATITUDE", file=sys.stderr) exit(1) lon = float(sys.argv[1]) lat = float(sys.argv[2]) transformer = Transformer.from_crs(4326, 3338, always_xy=True) x, y = transformer.transform(lon, lat) for dataset in DATASETS: for duration in DURATIONS: print(f"Duration: {duration} Dataset: {dataset}") header = (" " + " ".join([str(x).ljust(8) for x in INTERVALS])) print(header) print("=" * len(header)) for ts in TIMESLICES: ts_str = f"{ts[0]}-{ts[1]}" ds = xr.open_dataset( os.path.join( DATADIR,
def geoplot(  # noqa C901
    gdf_in,
    geometry_column="geometry",
    figure=None,
    figsize=None,
    title="",
    xlabel="Longitude",
    ylabel="Latitude",
    xlim=None,
    ylim=None,
    color="blue",
    colormap=None,
    colormap_uselog=False,
    colormap_range=None,
    category=None,
    dropdown=None,
    slider=None,
    slider_range=None,
    slider_name="",
    show_colorbar=True,
    colorbar_tick_format=None,
    xrange=None,
    yrange=None,
    hovertool=True,
    hovertool_columns=[],
    hovertool_string=None,
    simplify_shapes=None,
    tile_provider="CARTODBPOSITRON_RETINA",
    tile_provider_url=None,
    tile_attribution="",
    tile_alpha=1,
    panning=True,
    zooming=True,
    toolbar_location="right",
    show_figure=True,
    return_figure=True,
    return_html=False,
    legend=True,
    webgl=True,
    **kwargs,
):
    """Plot a GeoDataFrame/GeoSeries (or prepared DataFrame) on an interactive bokeh map.

    Points, lines or polygons are drawn on top of a web tile provider after
    reprojection to Web Mercator (EPSG:3857). Exactly one of <category>,
    <dropdown> or <slider> may be used to colour-code the glyphs by a
    numerical column; <dropdown> and <slider> add a widget that switches the
    colour column client-side via CustomJS. The figure/layout is shown when
    <show_figure> is True, returned as embeddable HTML when <return_html> is
    True, and returned as a bokeh object when <return_figure> is True.

    NOTE(review): the <xrange>/<yrange> parameters and the `row`, `Dropdown`
    and `BasicTicker` imports are unused in this body. The mutable default
    for <hovertool_columns> ([]) is only safe because the list is reassigned,
    never mutated in place.
    """

    # Imports:
    import bokeh.plotting
    from bokeh.layouts import column, row
    from bokeh.models import (
        BasicTicker,
        BoxZoomTool,
        ColorBar,
        ColumnDataSource,
        GeoJSONDataSource,
        HoverTool,
        LinearColorMapper,
        LogColorMapper,
        LogTicker,
        Select,
        Slider,
        WheelZoomTool,
    )
    from bokeh.models.callbacks import CustomJS
    from bokeh.models.widgets import Dropdown
    from bokeh.palettes import all_palettes
    from bokeh.plotting import show

    # Make a copy of the input geodataframe:
    gdf = gdf_in.copy()

    # Check layertypes: a plain DataFrame is treated as pre-extracted points;
    # a GeoDataFrame must contain a single geometry type.
    if type(gdf) != pd.DataFrame:
        layertypes = []
        if "Point" in str(gdf.geom_type.unique()):
            layertypes.append("Point")
        if "Line" in str(gdf.geom_type.unique()):
            layertypes.append("Line")
        if "Polygon" in str(gdf.geom_type.unique()):
            layertypes.append("Polygon")
        if len(layertypes) > 1:
            raise Exception(
                f"Can only plot GeoDataFrames/Series with single type of geometry (either Point, Line or Polygon). Provided is a GeoDataFrame/Series with types: {layertypes}"
            )
    else:
        layertypes = ["Point"]

    # Get and check provided parameters for geoplot:
    figure_options = {
        "title": title,
        "x_axis_label": xlabel,
        "y_axis_label": ylabel,
        "plot_width": 600,
        "plot_height": 400,
        "toolbar_location": toolbar_location,
        "active_scroll": "wheel_zoom",
        "x_axis_type": "mercator",
        "y_axis_type": "mercator",
        "match_aspect": True,
    }
    if figsize is not None:
        width, height = figsize
        figure_options["plot_width"] = width
        figure_options["plot_height"] = height
    if webgl:
        figure_options["output_backend"] = "webgl"

    if type(gdf) != pd.DataFrame:
        # Convert GeoDataFrame to Web Mercator Projection:
        gdf.to_crs(epsg=3857, inplace=True)

        # Simplify shapes if wanted (only meaningful for lines/polygons):
        if isinstance(simplify_shapes, numbers.Number):
            if layertypes[0] in ["Line", "Polygon"]:
                gdf[geometry_column] = gdf[geometry_column].simplify(
                    simplify_shapes)
        elif simplify_shapes is not None:
            raise ValueError(
                "<simplify_shapes> parameter only accepts numbers or None.")

    # Check for category, dropdown or slider (choropleth map column):
    category_options = 0
    if category is not None:
        category_options += 1
        category_columns = [category]
    if dropdown is not None:
        category_options += 1
        category_columns = dropdown
    if slider is not None:
        category_options += 1
        category_columns = slider
    if category_options > 1:
        raise ValueError(
            "Only one of <category>, <dropdown> or <slider> parameters is allowed to be used at once."
        )

    # Check for category (single choropleth plot):
    if category is None:
        pass
    elif isinstance(category, (list, tuple)):
        raise ValueError(
            "For <category>, please provide an existing single column of the GeoDataFrame."
        )
    elif category in gdf.columns:
        pass
    else:
        raise ValueError(
            f"Could not find column '{category}' in GeoDataFrame. For <category>, please provide an existing single column of the GeoDataFrame."
        )

    # Check for dropdown (multiple choropleth plots via dropdown selection):
    if dropdown is None:
        pass
    elif not isinstance(dropdown, (list, tuple)):
        raise ValueError(
            "For <dropdown>, please provide a list/tuple of existing columns of the GeoDataFrame."
        )
    else:
        for col in dropdown:
            if col not in gdf.columns:
                raise ValueError(
                    f"Could not find column '{col}' for <dropdown> in GeoDataFrame. "
                )

    # Check for slider (multiple choropleth plots via slider selection):
    if slider is None:
        pass
    elif not isinstance(slider, (list, tuple)):
        raise ValueError(
            "For <slider>, please provide a list/tuple of existing columns of the GeoDataFrame."
        )
    else:
        for col in slider:
            if col not in gdf.columns:
                raise ValueError(
                    f"Could not find column '{col}' for <slider> in GeoDataFrame. "
                )

        # A custom slider scale must align 1:1 with the slider columns and
        # have uniform spacing (like a range object):
        if slider_range is not None:
            if not isinstance(slider_range, Iterable):
                raise ValueError(
                    "<slider_range> has to be a type that is iterable like list, tuple, range, ..."
                )
            else:
                slider_range = list(slider_range)
                if len(slider_range) != len(slider):
                    raise ValueError(
                        "The number of elements in <slider_range> has to be the same as in <slider>."
                    )
                steps = []
                for i in range(len(slider_range) - 1):
                    steps.append(slider_range[i + 1] - slider_range[i])
                if len(set(steps)) > 1:
                    raise ValueError(
                        "<slider_range> has to have equal step size between each elements (like a range-object)."
                    )
                else:
                    slider_step = steps[0]
                    slider_start = slider_range[0]
                    slider_end = slider_range[-1]

    # Check colormap if either <category>, <dropdown> or <slider> is choosen:
    if category_options == 1:
        if colormap is None:
            colormap = blue_colormap
        elif isinstance(colormap, (tuple, list)):
            if len(colormap) > 1:
                pass
            else:
                raise ValueError(
                    f"<colormap> only accepts a list/tuple of at least two colors or the name of one of the following predefined colormaps (see also https://bokeh.pydata.org/en/latest/docs/reference/palettes.html ): {list(all_palettes.keys())}"
                )
        elif isinstance(colormap, str):
            if colormap in all_palettes:
                # Pick the largest palette of the named colormap family:
                colormap = all_palettes[colormap]
                colormap = colormap[max(colormap.keys())]
            else:
                raise ValueError(
                    f"Could not find <colormap> with name {colormap}. The following predefined colormaps are supported (see also https://bokeh.pydata.org/en/latest/docs/reference/palettes.html ): {list(all_palettes.keys())}"
                )
        else:
            raise ValueError(
                f"<colormap> only accepts a list/tuple of at least two colors or the name of one of the following predefined colormaps (see also https://bokeh.pydata.org/en/latest/docs/reference/palettes.html ): {list(all_palettes.keys())}"
            )
    else:
        # No choropleth column: a single fill color is used instead.
        if isinstance(color, str):
            colormap = [color]
        elif color is None:
            colormap = ["blue"]
        else:
            raise ValueError(
                "<color> has to be a string specifying the fill_color of the map glyph."
            )

    # Check xlim & ylim: limits are given in WGS84 degrees and converted to
    # Web Mercator for the figure ranges.
    if xlim is not None:
        if isinstance(xlim, (tuple, list)):
            if len(xlim) == 2:
                xmin, xmax = xlim
                for _ in [xmin, xmax]:
                    if not -180 < _ <= 180:
                        raise ValueError(
                            "Limits for x-axis (=Longitude) have to be between -180 and 180."
                        )
                if not xmin < xmax:
                    raise ValueError("xmin has to be smaller than xmax.")

                from pyproj import Transformer

                transformer = Transformer.from_crs("epsg:4326", "epsg:3857")
                # Axis order is (lat, lon) here because always_xy is not set.
                xmin = transformer.transform(0, xmin)[0]
                xmax = transformer.transform(0, xmax)[0]
                figure_options["x_range"] = (xmin, xmax)
            else:
                raise ValueError(
                    "Limits for x-axis (=Longitude) have to be of form [xmin, xmax] with values between -180 and 180."
                )
        else:
            raise ValueError(
                "Limits for x-axis (=Longitude) have to be of form [xmin, xmax] with values between -180 and 180."
            )

    if ylim is not None:
        if isinstance(ylim, (tuple, list)):
            if len(ylim) == 2:
                ymin, ymax = ylim
                for _ in [ymin, ymax]:
                    if not -90 < _ <= 90:
                        raise ValueError(
                            "Limits for y-axis (=Latitude) have to be between -90 and 90."
                        )
                if not ymin < ymax:
                    raise ValueError("ymin has to be smaller than ymax.")

                from pyproj import Transformer

                transformer = Transformer.from_crs("epsg:4326", "epsg:3857")
                ymin = transformer.transform(ymin, 0)[1]
                ymax = transformer.transform(ymax, 0)[1]
                figure_options["y_range"] = (ymin, ymax)
            else:
                raise ValueError(
                    "Limits for y-axis (=Latitude) have to be of form [ymin, ymax] with values between -90 and 90."
                )
        else:
            raise ValueError(
                "Limits for y-axis (=Latitude) have to be of form [ymin, ymax] with values between -90 and 90."
            )

    # Create Figure to draw (or reuse/extract the one passed in):
    old_layout = None
    if figure is None:
        figure_options["x_axis_label"] = (
            figure_options["x_axis_label"]
            if figure_options["x_axis_label"] is not None
            else "Longitute"
        )
        figure_options["y_axis_label"] = (
            figure_options["y_axis_label"]
            if figure_options["y_axis_label"] is not None
            else "Latitude"
        )
        p = bokeh.plotting.figure(**figure_options)

        # Add Tile Source as Background:
        p = _add_backgroundtile(
            p, tile_provider, tile_provider_url, tile_attribution, tile_alpha
        )

    elif isinstance(figure, type(bokeh.plotting.figure())):
        p = figure

    elif isinstance(figure, type(column())):
        old_layout = figure
        p = _get_figure(old_layout)

    else:
        raise ValueError(
            "Parameter <figure> has to be of type bokeh.plotting.figure or bokeh.layouts.column."
        )

    for t in p.tools:
        # Get ridd of zoom on axes:
        if isinstance(t, WheelZoomTool):
            t.zoom_on_axis = False
        # Make sure that box zoom matches aspect:
        if isinstance(t, BoxZoomTool):
            t.match_aspect = True

    # Hide legend if wanted: remember the raw input, then normalize <legend>
    # to a label string.
    legend_input = legend
    if isinstance(legend, str):
        pass
    else:
        legend = "GeoLayer"

    # Define colormapper:
    if len(colormap) == 1:
        # Single color, no mapper needed:
        kwargs["fill_color"] = colormap[0]

    elif category is not None:
        # Check if category column is numerical:
        if not issubclass(gdf[category].dtype.type, np.number):
            raise NotImplementedError(
                f"<category> plot only yet implemented for numerical columns. Column '{category}' is not numerical."
            )

        field = category
        colormapper_options = {"palette": colormap}
        if colormap_range is not None:
            if not isinstance(colormap_range, (tuple, list)):
                raise ValueError(
                    "<colormap_range> can only be 'None' or a tuple/list of form (min, max)."
                )
            elif len(colormap_range) == 2:
                colormapper_options["low"] = colormap_range[0]
                colormapper_options["high"] = colormap_range[1]
        else:
            colormapper_options["low"] = gdf[field].min()
            colormapper_options["high"] = gdf[field].max()
        if colormap_uselog:
            colormapper = LogColorMapper(**colormapper_options)
        else:
            colormapper = LinearColorMapper(**colormapper_options)
        # The data copied into the "Colormap" column drives the fill color:
        kwargs["fill_color"] = {"field": "Colormap", "transform": colormapper}
        if not isinstance(legend, str):
            legend = str(field)

    elif dropdown is not None:
        # Check if all columns in dropdown selection are numerical:
        for col in dropdown:
            if not issubclass(gdf[col].dtype.type, np.number):
                raise NotImplementedError(
                    f"<dropdown> plot only yet implemented for numerical columns. Column '{col}' is not numerical."
                )

        field = dropdown[0]
        colormapper_options = {"palette": colormap}
        if colormap_range is not None:
            if not isinstance(colormap_range, (tuple, list)):
                raise ValueError(
                    "<colormap_range> can only be 'None' or a tuple/list of form (min, max)."
                )
            elif len(colormap_range) == 2:
                colormapper_options["low"] = colormap_range[0]
                colormapper_options["high"] = colormap_range[1]
        else:
            # Shared scale across all dropdown columns:
            colormapper_options["low"] = gdf[dropdown].min().min()
            colormapper_options["high"] = gdf[dropdown].max().max()
        if colormap_uselog:
            colormapper = LogColorMapper(**colormapper_options)
        else:
            colormapper = LinearColorMapper(**colormapper_options)
        kwargs["fill_color"] = {"field": "Colormap", "transform": colormapper}
        legend = " " + field

    elif slider is not None:
        # Check if all columns in dropdown selection are numerical:
        for col in slider:
            if not issubclass(gdf[col].dtype.type, np.number):
                raise NotImplementedError(
                    f"<slider> plot only yet implemented for numerical columns. Column '{col}' is not numerical."
                )

        field = slider[0]
        colormapper_options = {"palette": colormap}
        if colormap_range is not None:
            if not isinstance(colormap_range, (tuple, list)):
                raise ValueError(
                    "<colormap_range> can only be 'None' or a tuple/list of form (min, max)."
                )
            elif len(colormap_range) == 2:
                colormapper_options["low"] = colormap_range[0]
                colormapper_options["high"] = colormap_range[1]
        else:
            # Shared scale across all slider columns:
            colormapper_options["low"] = gdf[slider].min().min()
            colormapper_options["high"] = gdf[slider].max().max()
        if colormap_uselog:
            colormapper = LogColorMapper(**colormapper_options)
        else:
            colormapper = LinearColorMapper(**colormapper_options)
        kwargs["fill_color"] = {"field": "Colormap", "transform": colormapper}
        if not isinstance(legend, str):
            legend = "Geolayer"

    # Check that only hovertool_columns or hovertool_string is used:
    if isinstance(hovertool_columns, (list, tuple, str)):
        if len(hovertool_columns) > 0 and hovertool_string is not None:
            raise ValueError(
                "Either <hovertool_columns> or <hovertool_string> can be used, but not both at the same time."
            )
    else:
        raise ValueError(
            "<hovertool_columns> has to be a list of columns of the GeoDataFrame or the string 'all'."
        )
    if hovertool_string is not None:
        hovertool_columns = "all"

    # Check for Hovertool columns:
    if hovertool:
        if not isinstance(hovertool_columns, (list, tuple)):
            if hovertool_columns == "all":
                hovertool_columns = list(
                    filter(lambda col: col != geometry_column, gdf.columns)
                )
            else:
                raise ValueError(
                    "<hovertool_columns> has to be a list of columns of the GeoDataFrame or the string 'all'."
                )
        elif len(hovertool_columns) == 0:
            # Default hover content follows the choropleth column(s):
            if category is not None:
                hovertool_columns = [category]
            elif dropdown is not None:
                hovertool_columns = dropdown
            elif slider is not None:
                hovertool_columns = slider
            else:
                hovertool_columns = []
        else:
            for col in hovertool_columns:
                if col not in gdf.columns:
                    raise ValueError(
                        f"Could not find columns '{col}' in GeoDataFrame. <hovertool_columns> has to be a list of columns of the GeoDataFrame or the string 'all'."
                    )
    else:
        if category is None:
            hovertool_columns = []
        else:
            hovertool_columns = [category]

    # Reduce DataFrame to needed columns:
    if type(gdf) == pd.DataFrame:
        gdf["Geometry"] = 0
        additional_columns = ["x", "y"]
    else:
        additional_columns = [geometry_column]
    # Keep any column that a glyph kwarg references by name:
    for kwarg, value in kwargs.items():
        if isinstance(value, Hashable):
            if value in gdf.columns:
                additional_columns.append(value)
    if category_options == 0:
        gdf = gdf[list(set(hovertool_columns) | set(additional_columns))]
    else:
        gdf = gdf[
            list(
                set(hovertool_columns)
                | set(category_columns)
                | set(additional_columns)
            )
        ]
        gdf["Colormap"] = gdf[field]
        field = "Colormap"

    # Create GeoJSON DataSource for Plot:
    if type(gdf) != pd.DataFrame:
        geo_source = GeoJSONDataSource(geojson=gdf.to_json())
    else:
        geo_source = gdf

    # Draw Glyph on Figure:
    layout = None
    if "Point" in layertypes:
        if "line_color" not in kwargs:
            kwargs["line_color"] = kwargs["fill_color"]
        glyph = p.scatter(
            x="x", y="y", source=geo_source, legend_label=legend, **kwargs
        )

    if "Line" in layertypes:
        if "line_color" not in kwargs:
            kwargs["line_color"] = kwargs["fill_color"]
            del kwargs["fill_color"]
        glyph = p.multi_line(
            xs="xs", ys="ys", source=geo_source, legend_label=legend, **kwargs
        )

    if "Polygon" in layertypes:

        if "line_color" not in kwargs:
            kwargs["line_color"] = "black"

        # Creates from a geoDataFrame with Polygons and Multipolygons a Pandas DataFrame
        # with x any y columns specifying the geometry of the Polygons:
        geo_source = ColumnDataSource(
            convert_geoDataFrame_to_patches(gdf, geometry_column)
        )

        # Plot polygons:
        glyph = p.multi_polygons(
            xs="__x__", ys="__y__", source=geo_source, legend_label=legend, **kwargs
        )

    # Add hovertool:
    if hovertool and (category_options == 1 or len(hovertool_columns) > 0):
        my_hover = HoverTool(renderers=[glyph])
        if hovertool_string is None:
            my_hover.tooltips = [
                (str(col), "@{%s}" % col) for col in hovertool_columns
            ]
        else:
            my_hover.tooltips = hovertool_string
        p.add_tools(my_hover)

    # Add colorbar:
    if show_colorbar and category_options == 1:
        colorbar_options = {
            "color_mapper": colormapper,
            "label_standoff": 12,
            "border_line_color": None,
            "location": (0, 0),
        }
        if colormap_uselog:
            colorbar_options["ticker"] = LogTicker()
        if colorbar_tick_format:
            colorbar_options["formatter"] = get_tick_formatter(
                colorbar_tick_format)
        colorbar = ColorBar(**colorbar_options)

        p.add_layout(colorbar, "right")

    # Add Dropdown Widget:
    if dropdown is not None:
        # Define Dropdown widget:
        dropdown_widget = Select(
            title="Select Choropleth Layer", options=list(zip(dropdown, dropdown))
        )

        # Define Callback for Dropdown widget:
        callback = CustomJS(
            args=dict(
                dropdown_widget=dropdown_widget,
                geo_source=geo_source,
                legend=p.legend[0].items[0],
            ),
            code="""

                //Change selection of field for Colormapper for choropleth plot:
                geo_source.data["Colormap"] = geo_source.data[dropdown_widget.value];
                geo_source.change.emit();

                //Change label of Legend:
                legend.label["value"] = " " + dropdown_widget.value;

                """,
        )
        dropdown_widget.js_on_change("value", callback)

        # Add Dropdown widget above the plot:
        if old_layout is None:
            layout = column(dropdown_widget, p)
        else:
            layout = column(dropdown_widget, old_layout)

    # Add Slider Widget:
    if slider is not None:

        if slider_range is None:
            slider_start = 0
            slider_end = len(slider) - 1
            slider_step = 1

        # Maps each slider value to the column it selects:
        value2name = ColumnDataSource(
            {
                "Values": np.arange(
                    slider_start, slider_end + slider_step, slider_step
                ),
                "Names": slider,
            }
        )

        # Define Slider widget:
        slider_widget = Slider(
            start=slider_start,
            end=slider_end,
            value=slider_start,
            step=slider_step,
            title=slider_name,
        )

        # Define Callback for Slider widget:
        callback = CustomJS(
            args=dict(
                slider_widget=slider_widget,
                geo_source=geo_source,
                value2name=value2name,
            ),
            code="""

                //Change selection of field for Colormapper for choropleth plot:
                var slider_value = slider_widget.value;
                var i;
                for(i=0; i<value2name.data["Names"].length; i++)
                    {
                    if (value2name.data["Values"][i] == slider_value)
                        {
                        var name = value2name.data["Names"][i];
                        }
                    }
                geo_source.data["Colormap"] = geo_source.data[name];
                geo_source.change.emit();

                """,
        )
        slider_widget.js_on_change("value", callback)

        # Add Slider widget above the plot:
        if old_layout is None:
            layout = column(slider_widget, p)
        else:
            layout = column(slider_widget, old_layout)

    # Hide legend if user wants:
    if legend_input is False:
        p.legend.visible = False

    # Set click policy for legend:
    p.legend.click_policy = "hide"

    # Set panning option:
    if panning is False:
        p.toolbar.active_drag = None

    # Set zooming option:
    if zooming is False:
        p.toolbar.active_scroll = None

    # Display plot and if wanted return plot:
    if layout is None:
        if old_layout is None:
            layout = p
        else:
            layout = old_layout

    # Display plot if wanted
    if show_figure:
        show(layout)

    # Return as (embeddable) HTML if wanted:
    if return_html:
        return embedded_html(layout)

    # Return plot:
    if return_figure:
        return layout
def get_multipolygon(self, **kwargs: Any) -> MultiPolygon:
    """Returns the `shapely` representation of the geometry

    Calculates and returns the `MultiPolygon` representation of
    the geometry.

    Parameters
    ----------
    **kwargs : dict, optional
        Currently unused for this class, needed for generic API
        support

    Returns
    -------
    MultiPolygon
        Calculated and merged polygons from all geometry inputs.

    Raises
    ------
    ValueError
        If the union of all extracted shapes is neither a Polygon
        nor a MultiPolygon.

    Notes
    -----
    All calculations are done lazily and the result is **not**
    cached. During this process all the stored contour extraction
    specs are applied on all the inputs and then the resulting
    shapes are merged. The calculation for each DEM and feature is
    stored on disk as a feather file. In the last step all these
    feather files are combined using out-of-core calculation by
    `GeoPandas`.
    """

    # For now we don't need to do any calculations here, the
    # ops will take care of extracting everything. Later the logic
    # in ops needs to move here (like hfun collector)

    # Since raster geoms are stateless, the polygons should be
    # calculated everytime

    epsg4326 = CRS.from_user_input("EPSG:4326")
    mp = None
    # All intermediate feather files live in this temp dir and are
    # removed when the context exits:
    with tempfile.TemporaryDirectory() as temp_dir:
        feather_files = []

        temp_path = Path(temp_dir)

        # Clip region: either the explicit base shape (reprojected to
        # EPSG:4326 if needed) or the hull of the base mesh.
        base_multipoly = None
        if self._base_shape:
            base_multipoly = self._base_shape
            if not self._base_shape_crs.equals(epsg4326):
                transformer = Transformer.from_crs(
                    self._base_shape_crs, epsg4326, always_xy=True)
                base_multipoly = ops.transform(
                    transformer.transform, base_multipoly)
        elif self._base_mesh:
            # TODO: Make sure all calcs are in EPSG:4326
            base_multipoly = self._base_mesh.hull.multipolygon()

        feather_files.append(self._extract_global_boundary(
            temp_path, base_multipoly))
        feather_files.extend(self._extract_nonraster_boundary(
            temp_path, base_multipoly))
        feather_files.extend(self._extract_features(
            temp_path, base_multipoly))

        # NOTE(review): DataFrame.append is deprecated and removed in
        # pandas 2.0 -- consider pd.concat when upgrading.
        gdf = gpd.GeoDataFrame(columns=['geometry'], crs=epsg4326)
        for f in feather_files:
            gdf = gdf.append(gpd.read_feather(f))

        mp = gdf.unary_union

        # Normalize a single Polygon result to a MultiPolygon:
        if isinstance(mp, Polygon):
            mp = MultiPolygon([mp])
        elif not isinstance(mp, MultiPolygon):
            raise ValueError(
                "Union of all shapes resulted in invalid geometry"
                + " type")

    return mp
@author: tommo """ import streamlit as st from streamlit_folium import folium_static import folium import geopandas as gpd import pandas as pd #import altair as alt from streamlit_folium import folium_static import folium from pyproj import Transformer transformer = Transformer.from_crs("epsg:27700", "epsg:4326") st.markdown( f''' <style> .sidebar .sidebar-content {{ width: 800px; }} </style> ''', unsafe_allow_html=True ) st.write( """ # SCANNER condition survey results
# -*- coding: utf-8 -*-
"""
Created on Sun Aug 25 11:56:26 2019

@author: Ray
"""
# importing required libraries
import csv
import requests, json
from pyproj import Transformer

# Transformer from Statistics Canada Lambert (EPSG:3347) to WGS84 lat/lon.
letransform = Transformer.from_crs("epsg:3347", "epsg:4326")

# Known hospital locations (WGS84 degrees) used as destinations.
hospitalcoords = {
    'Met': {"lat": 42.301318, "lng": -82.999226},
    'Megahospital': {"lat": 42.270184, "lng": -82.928170},
    'Ouelette': {"lat": 42.308185, "lng": -83.030759},
    'Leamington': {"lat": 42.0486634, "lng": -82.6166188},
}

# Read the API key. BUGFIX: the original left the file handle open and
# guarded the read with a redundant `f.mode == 'r'` check; a `with`
# block guarantees the handle is closed.
with open("key.txt", "r") as f:
    api_key = f.read()

# url variable store url
url = 'https://maps.googleapis.com/maps/api/distancematrix/json?'
def WGS2UTM(lon, lat):
    """Convert WGS84 lon/lat (degrees) to UTM zone 50N easting/northing.

    Target CRS is EPSG:32650, see https://epsg.io/32650. Note that the
    transformer expects (lat, lon) order because `always_xy` is not set.
    """
    wgs84_to_utm50n = Transformer.from_crs("epsg:4326", "epsg:32650")
    easting, northing = wgs84_to_utm50n.transform(lat, lon)
    return easting, northing
def test_equivalent_crs__different():
    """skip_equivalent triggers a UserWarning; distinct CRS stay non-equivalent."""
    with pytest.warns(UserWarning):
        tr = Transformer.from_crs("epsg:4326", 3857, skip_equivalent=True)
        internals = tr._transformer
        assert internals.skip_equivalent
        assert not internals.projections_equivalent
        assert not internals.projections_exact_same
Entire project is uploaded to https://github.com/jamescoombs3/ookla
"""
from pyproj import Transformer

# Sample coordinate pair from gridfinder.com: lat/long to 3 d.p.,
# eastings/northings to the nearest metre.
lat, long = 55.023, -1.536
E, N = 429763, 569927

# British National Grid projection.
bng = 'epsg:27700'
# Projection used by most SatNav, Google etc. (WGS84).
wgs84 = 'epsg:4326'

# Forward transformation: WGS84 (lat, long) -> BNG (eastings, northings).
# Without always_xy the input order is (lat, long).
transformer = Transformer.from_crs(wgs84, bng)
e2, n2 = transformer.transform(lat, long)
print('expecting coords', E, N)
print('returned coords', e2, n2)
print('difference is', E - e2, N - n2)

# Reverse transformation: swap the CRS arguments to go BNG -> WGS84.
transformer = Transformer.from_crs(bng, wgs84)
lat2, long2 = transformer.transform(E, N)
print('expecting coords', lat, long)
print('returned coords', lat2, long2)
print('difference is', lat - lat2, long - long2)
def test_repr(from_crs, to_crs, expected_repr):
    """repr() of a transformer matches the parametrized expectation."""
    transformer = Transformer.from_crs(from_crs, to_crs)
    assert repr(transformer) == expected_repr
def get_planet_grid(squares, out):
    """Create a grid adapted to the input squares and to the Planet initial grid.

    NOTE(review): the `squares` parameter is later rebound to the output
    square list inside this function -- confirm the shadowing is intended.
    """
    out.add_msg(cm.planet.grid)

    # get the shape of the aoi in EPSG:3857 proj
    aoi_shp = unary_union(squares)
    aoi_gdf = gpd.GeoDataFrame({
        'geometry': [aoi_shp]
    }, crs="EPSG:4326").to_crs('EPSG:3857')

    # extract the aoi shape
    aoi_shp_proj = aoi_gdf['geometry'][0]

    # retrieve the bounding box
    aoi_bb = sg.box(*aoi_gdf.total_bounds)

    # compute the longitude and latitude in the appropriate CRS
    crs_4326 = CRS.from_epsg(4326)
    crs_3857 = CRS.from_epsg(3857)
    crs_bounds = crs_3857.area_of_use.bounds

    # bottom-left / top-right corners of the CRS area of use in meters
    proj = Transformer.from_crs(4326, 3857, always_xy=True)
    bl = proj.transform(crs_bounds[0], crs_bounds[1])
    tr = proj.transform(crs_bounds[2], crs_bounds[3])

    # the planet grid is constructing a 2048x2048 grid of SQUARES.
    # The latitude extends is bigger (20048966.10m VS 20026376.39) so to ensure the "squariness"
    # Planet lab have based the numerotation and extends of it square grid on the longitude only.
    # the extreme -90 and +90 band it thus exlucded but there are no forest there so we don't care
    longitudes = np.linspace(bl[0], tr[0], 2048 + 1)

    # NOTE(review): comment said "248 squares" but the divisor is 2048,
    # consistent with the linspace above -- comment looked like a typo.
    box_size = (tr[0] - bl[0]) / 2048

    # filter with the geometry bounds
    bb = aoi_gdf.total_bounds

    # filter lon and lat (one box_size margin on each side)
    lon_filter = longitudes[
        (longitudes > (bb[0] - box_size)) & (longitudes < bb[2] + box_size)]
    lat_filter = longitudes[
        (longitudes > (bb[1] - box_size)) & (longitudes < bb[3] + box_size)]

    # get the index offset of the filtered window inside the global grid
    x_offset = np.nonzero(longitudes == lon_filter[0])[0][0]
    y_offset = np.nonzero(longitudes == lat_filter[0])[0][0]

    # create the grid
    x = []
    y = []
    names = []
    squares = []
    for coords in product(
            range(len(lon_filter) - 1), range(len(lat_filter) - 1)):

        # get the x and y index
        ix = coords[0]
        iy = coords[1]

        # fill the grid values
        x.append(ix + x_offset)
        y.append(iy + y_offset)
        # NOTE(review): `:4d` pads with spaces, not zeros -- confirm the
        # Planet tile naming scheme does not expect `:04d`.
        names.append(f'L15-{x[-1]:4d}E-{y[-1]:4d}N.tif')
        squares.append(
            sg.box(
                lon_filter[ix],
                lat_filter[iy],
                lon_filter[ix + 1],
                lat_filter[iy + 1]))

    # create a buffer grid in 3857
    grid = gpd.GeoDataFrame(
        {
            'x': x,
            'y': y,
            'names': names,
            'geometry': squares
        },
        crs='EPSG:3857')

    # cut the grid to the aoi extends
    mask = grid.intersects(aoi_shp_proj)
    grid = grid.loc[mask]

    # project back to 4326
    grid_gdf = grid.to_crs('EPSG:4326')

    return grid_gdf
def test_transformer__operations_missing():
    """A CRS pair with no available operations reports an empty tuple."""
    transformer = Transformer.from_crs(7789, 8401)
    assert transformer.operations == ()
from datetime import date, timedelta
import wradlib.georef as georef
from pyproj import CRS
from pyproj import Transformer
import pandas as pd

# CSV holding the satellite's lat/lon positions
filename = '/home/arielcg/Documentos/Tesis/src/data/base/geoBase.csv'

# geodetic -> projected transformer for EPSG:4326
crs = CRS.from_epsg(4326)
proj = Transformer.from_crs(crs.geodetic_crs, crs)
df = pd.read_csv(filename, delimiter=",")

# transform every satellite lat/lon pair to x/y
pairs = [proj.transform(lat, lon) for lat, lon in zip(df.Latitude, df.Longitude)]
x = np.array([p[0] for p in pairs])
y = np.array([p[1] for p in pairs])

# one azimuth per degree, 0..359
az = np.linspace(0, 360, 361)[0:-1]
proj = georef.epsg_to_osr(4326)
def test_transformer_not_equals(comparison):
    """A transformer never compares equal to the parametrized `comparison`."""
    transformer = Transformer.from_crs(28356, 7856)
    assert transformer != comparison
# -*- coding: utf-8 -*- from pyproj import Transformer from shapely.geometry import shape, mapping from shapely.ops import transform from shapely.affinity import translate, scale, rotate from shapely import wkt # , wkb import mercantile transformer = Transformer.from_crs("EPSG:4326", "EPSG:3857", always_xy=True) def merc2xy(x, y, z, sgeom): """ Apply some affine transformations to input shapely geometry for coordinates transformation from Mercatore to local tile pixels. sgeom @shapely.geom : Input polygon Returns a brand new shapely polygon in the new coordinates. """ MVT_EXTENT = 4096 X_min, Y_max = mercantile.xy(*mercantile.ul(x, y, z)) X_max, Y_min = mercantile.xy(*mercantile.ul(x + 1, y - 1, z)) geom_3857 = transform(transformer.transform, sgeom) tx, ty = -X_min, -2 * Y_max + Y_min geom_XY = translate(geom_3857, tx, ty) x_scale_factor = MVT_EXTENT / (X_max - X_min) y_scale_factor = -MVT_EXTENT / (Y_max - Y_min) geom_xy = scale(geom_XY, x_scale_factor, y_scale_factor, origin=(0, 0, 0))
def test_equivalent_crs__disabled():
    """skip_equivalent defaults to off even when the CRSs are equivalent."""
    inner = Transformer.from_crs("epsg:4326", 4326)._transformer
    assert not inner.skip_equivalent
    assert inner.projections_equivalent
    assert inner.projections_exact_same
import json, sys, math, statistics
from pyproj import CRS, Transformer

# prepare the conversion between WGS-84 and S-JTSK
crs_wgs = CRS.from_epsg(4326)
crs_jtsk = CRS.from_epsg(5514)
wgs2jtsk = Transformer.from_crs(crs_wgs, crs_jtsk)


def verejne_kont(kontejnery_data):
    """Return the coordinates of all publicly accessible containers.

    kontejnery_data : GeoJSON-like dict with a "features" list; only
    features whose 'PRISTUP' property equals 'volně' (freely accessible)
    are kept.
    """
    # collect coordinates of the public containers
    kontejnery = []
    container_features = kontejnery_data["features"]
    for container in container_features:  # walk through all containers
        properties = container["properties"]
        pristup = properties["PRISTUP"]
        if pristup == 'volně':  # store coordinates of free containers in the 'kontejnery' list
            coordinates = container["geometry"]['coordinates']
            kontejnery.append(coordinates)
    return kontejnery


def adresy(adresy_data):
    # NOTE(review): this function continues past the end of this chunk;
    # it appears to collect building names and coordinates
    adresy_jmena = []
    adresy_coord = []
    adresy_features = adresy_data["features"]
    for buliding in adresy_features:  # walk through all buildings
        properties = buliding["properties"]
        coordinates = buliding["geometry"]['coordinates']
# Specify log10_range bins plot_height=20, y_range=[min_log10_range, max_log10_range], ) agg = cvs2.points( cell_towers_ddf, x="created", y="log10_range", agg=ds.count_cat("radio") ) # Set created index back to datetime values agg = agg.assign_coords(created=created_bin_centers) return agg # Coordinate transformations transformer_4326_to_3857 = Transformer.from_crs("epsg:4326", "epsg:3857") transformer_3857_to_4326 = Transformer.from_crs("epsg:3857", "epsg:4326") def epsg_4326_to_3857(coords): return [transformer_4326_to_3857.transform(*reversed(row)) for row in coords] def epsg_3857_to_4326(coords): return [list(reversed(transformer_3857_to_4326.transform(*row))) for row in coords] @retry(wait_exponential_multiplier=100, wait_exponential_max=2000, stop_max_delay=6000) def get_dataset(client, name): return client.get_dataset(name)
import pandas as pd
import numpy as np
from collections import defaultdict
from pyproj import Proj, transform, Transformer
from shapely.geometry import Point

# maximum number of differing stops tolerated between two "same" routes
NUM_DIFFERENT_STOPS_ALLOWED = 3
# EPSG:4326 lon/lat -> EPSG:2157 projected coordinates (x/y axis order)
TRANSFORMER = Transformer.from_crs('EPSG:4326', 'EPSG:2157', always_xy=True)


def devide_to_trips(df):
    """Group a GTFS-like dataframe into route -> line -> direction -> trip.

    Each leaf value is that trip's dataframe sorted by stop_sequence with
    the index reset.
    """
    trips_object = defaultdict(lambda: defaultdict(lambda: defaultdict(
        lambda: dict())))  # route - trip - records
    for attrs, trip_df in df.groupby(
            ["route_id", "route_short_name", "direction_id", "trip_id"]):
        (route_id, line, direction, trip_id) = attrs
        trips_object[route_id][line][direction][trip_id] = trip_df.sort_values(
            ["stop_sequence"]).reset_index()
    return trips_object


def is_same_route(trip_df1, trip_df2):
    # Checking if the two trips have the same route:
    # if there's a different stop with the same stop_sequence then the trips are different
    # to avoid error of jump between stops (from 56 to 58)
    # NOTE(review): this function continues past the end of this chunk
    stop_sequence_dic = defaultdict(lambda: list())
    for (i, row1), (j, row2) in zip(trip_df1.iterrows(), trip_df2.iterrows()):
        stop_sequence_dic[row1["stop_sequence"]].append(row1["stop_id"])
        stop_sequence_dic[row2["stop_sequence"]].append(row2["stop_id"])
    num_differ_stops = 0  # number of stops where the 2 routes differ
    for stop_sequence, stop_ids_list in stop_sequence_dic.items():
def test_transform_direction__invalid():
    """Passing an unrecognised `direction` keyword must raise ValueError."""
    merc = Transformer.from_crs(4326, 3857)
    with pytest.raises(ValueError, match="Invalid value"):
        merc.transform(-33, 24, direction="WHEREVER")
class GpsShpCtl:
    """Scan GPS-track shapefiles and copy those that look like delivery runs."""

    # NOTE(review): mutable class attributes are shared across instances;
    # __init__ rebinds shpFileList per instance, but user_dic stays shared.
    shpFileList = []
    user_dic = {}
    # lon/lat (epsg:4326) -> epsg:5178 projected metres, for distance maths
    transformer = Transformer.from_crs('epsg:4326', 'epsg:5178', always_xy=True)

    def __init__(self, shpFileList):
        self.shpFileList = shpFileList

    def get_shapefile_familynames(self, shpfilename):
        """Return the (.shp, .dbf, .shx) sibling file names for a shapefile."""
        s = os.path.splitext(shpfilename)
        shp = s[0] + ".shp"
        dbf = s[0] + ".dbf"
        shx = s[0] + ".shx"
        return shp, dbf, shx

    def getFileNameInfo(self):
        """Count files per user id parsed from names like '<word>_<uid>_<d>_<d>_PT.shp'."""
        p = re.compile(r"\w+_(\d+)_(\d+_\d+)_PT.shp")
        for filepath in self.shpFileList:
            file_name = os.path.basename(filepath)
            ru = p.findall(file_name)
            # NOTE(review): raises IndexError if the name doesn't match the pattern
            uid = int(ru[0][0])
            print(uid, ru[0][1])
            if self.user_dic.get(uid) == None:
                self.user_dic[uid] = 1
            else:
                self.user_dic[uid] = self.user_dic[uid] + 1
            print("%d, %d" % (uid, self.user_dic[uid]))

    def IsDeliverer(self):
        # ** --> recursive dir scan
        #self.getFileNameInfo()
        cc = nvconfig.instance()
        # NOTE(review): bare attribute access with no effect — dead statement?
        cc._SELECTED_GPS_DIR_PATH
        for filepath in self.shpFileList:
            file_name = os.path.basename(filepath)
            gpslist = gpd.read_file(filepath, encoding='cp949')
            stay_x, stay_y = 0, 0   # reference position of the current "stay"
            stay_t = 0              # timestamp when the stay reference was set
            stay_count = 0          # number of detected delivery-like stops
            stay_state = 0          # 0 = moving / watching, 1 = inside a stay
            # the same user's GPS files are concatenated and analysed together
            for index, row in gpslist.iterrows():
                # treat as a delivery if there are several places where the
                # position stayed within 50 m for 5 minutes to 1 hour
                # NOTE(review): the code actually checks 300-1800 s (5-30 min)
                # and a threshold of stay_count >= 3, not 5 — confirm intent
                x, y = self.transformer.transform(row.geometry.x, row.geometry.y)
                t = datetime.strptime(row.DATETIME, "%Y/%m/%d %H:%M:%S").timestamp()
                if index <= 0:
                    # first record: just seed the stay reference point
                    stay_x, stay_y = x, y
                    stay_t = t
                    continue
                stay_diff_dist = math.sqrt((x - stay_x)**2 + (y - stay_y)**2)
                stay_diff_time = t - stay_t
                #print("%d %d(%d) %.7f %.7f %.2f" % (index, t, diff_time, row.geometry.x, row.geometry.y, dist))
                if stay_state == 0:
                    if stay_diff_dist < 50:
                        if stay_diff_time > 300 and stay_diff_time < 1800:
                            # delivered: count this location as a stop
                            stay_state = 1
                            #print("%d) %d %.7f,%.7f" % (stay_count, index, row.geometry.x, row.geometry.y))
                            #print("%f, %f , %f, %f, %f , %f " % (stay_diff_dist, stay_diff_time, stay_x, stay_y, x, y ))
                            stay_x = x
                            stay_y = y
                            stay_t = t
                            stay_count = stay_count + 1
                        else:
                            # stay_diff_time out of range: keep waiting
                            pass
                    else:
                        # stay_diff_dist >= 50
                        # moved away: reset the stay reference values
                        stay_x = x
                        stay_y = y
                        stay_t = t
                        pass
                else:
                    # stay_state == 1 :
                    if stay_diff_dist > 50:
                        # left the stay area: back to watching state
                        stay_state = 0
                        stay_x = x
                        stay_y = y
                        stay_t = t
                    else:
                        pass
            if stay_count >= 3:
                print("***** stay %d filename : %s" % (stay_count, file_name))
                # file copy: move the whole shapefile family to the selected dir
                shp_fnames = self.get_shapefile_familynames(file_name)
                for i, name in enumerate(shp_fnames):
                    src_filepath = os.path.join(cc._GPSLOG_FILE_PATH, name)
                    target_filepath = os.path.join(cc._SELECTED_GPS_DIR_PATH, name)
                    shutil.copy(src_filepath, target_filepath)
                    if i == 0:
                        print(src_filepath, target_filepath)
def test_str():
    """str() of a transformer exposes the underlying pipeline definition."""
    text = str(Transformer.from_crs(4326, 3857))
    assert text.startswith("proj=pipeline")
epsgOut = 'epsg:4326' # WGS84 outputDims = 2 # C1 = Coords(9.198090266826734,48.78972385690148,265.2137686) # C2 = Coords(9.193484399096668,48.78742827615106,279.9151045) # get_linelength_between_points(C1, C2) # # usecols=[1,2,3,4,5,6,7,8,9])#, names=['x', 'y', 'z','R','G','B','ID','velocity','AverageVelocity']) data = pd.read_csv(inputFile, header='infer') # npd = data.to_numpy() w = open(outputFile, "w", 16000000) tmpString = "" l = len(data.columns) transformer = Transformer.from_crs(epsgIn, epsgOut) convLon = [] convLat = [] for i in range(len(data)): # x and y are exchanged because DHDN uses Northing (y) and Easting (x) C = Coords(data.y[i], data.x[i], data.z[i]) if outputDims == 3: c = transformer.transform(C.x, C.y, C.z) else: c = transformer.transform(C.x, C.y) convLon.append(c[1]) convLat.append(c[0])
def test_to_json():
    """Default JSON export mentions the conversion and stays on one line."""
    compact = Transformer.from_crs(4326, 3857).to_json()
    assert "Conversion" in compact
    assert "\n" not in compact
def test_equivalent_crs__disabled():
    """Equivalent-CRS detection still works (and warns) with skip off."""
    with pytest.warns(UserWarning):
        inner = Transformer.from_crs("epsg:4326", 4326)._transformer
    assert not inner.skip_equivalent
    assert inner.projections_equivalent
    assert inner.projections_exact_same