Example No. 1
    def compute_geometry(self, bbox, filename=None):
        """
        Parse OSM file (area in bbox) to retrieve information about geometry.

        :param Sequence[float] bbox: area to be parsed in format (min_lon, min_lat, max_lon, max_lat)
        :param Optional[str] filename: map file in .osm.pbf format or None (map will be downloaded)
        """
        assert len(bbox) == 4
        self.bbox_size = (fabs(bbox[2] - bbox[0]), fabs(bbox[3] - bbox[1]))

        if filename is None:
            converter = OsmConverter(bbox)
            filename = converter.filename

        osm = OSM(filename, bounding_box=bbox)
        multipolygons = GeoDataFrame(columns=['tag', 'geometry'])

        natural = osm.get_natural()
        if natural is not None:
            natural = natural.loc[:, ['natural', 'geometry']].rename(
                columns={'natural': 'tag'})
            self.polygons = self.polygons.append(
                natural.loc[natural.geometry.type == 'Polygon'])
            multipolygons = multipolygons.append(
                natural.loc[natural.geometry.type == 'MultiPolygon'])
            natural.drop(natural.index, inplace=True)

        landuse = osm.get_landuse()
        if landuse is not None:
            landuse = landuse.loc[:, ['landuse', 'geometry']].rename(
                columns={'landuse': 'tag'})
            self.polygons = self.polygons.append(
                landuse.loc[landuse.geometry.type == 'Polygon'])
            multipolygons = multipolygons.append(
                landuse.loc[landuse.geometry.type == 'MultiPolygon'])
            landuse.drop(landuse.index, inplace=True)

        # splitting multipolygons to polygons
        for i in range(multipolygons.shape[0]):
            tag = multipolygons.tag.iloc[i]
            for polygon in multipolygons.geometry.iloc[i].geoms:
                self.polygons = self.polygons.append(
                    {
                        'tag': tag,
                        'geometry': polygon
                    }, ignore_index=True)

        roads = osm.get_network()
        if roads is not None:
            roads = self.__dissolve(roads[["highway", "geometry"]])
            self.multilinestrings = GeoDataFrame(
                roads.loc[roads.geometry.type == 'MultiLineString']).rename(
                    columns={'highway': 'tag'})

        self.tag_value.eval(self.polygons, self.multilinestrings, "tag")
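This example, like most on this page, accumulates rows with GeoDataFrame.append, which GeoPandas inherits from pandas; pandas deprecated DataFrame.append in 1.4 and removed it in 2.0, so on current versions the same pattern has to go through pd.concat. A minimal sketch of the replacement (the helper name concat_rows is illustrative, not part of the project above):

import pandas as pd
from geopandas import GeoDataFrame

def concat_rows(base, extra):
    # pd.concat is the documented replacement for the removed DataFrame.append
    return GeoDataFrame(pd.concat([base, extra], ignore_index=True))

# e.g. self.polygons = concat_rows(self.polygons, natural.loc[natural.geometry.type == 'Polygon'])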
Example No. 2
    def get_locations_at(self, t):
        """
        Returns GeoDataFrame with trajectory locations at the specified timestamp

        Parameters
        ----------
        t : datetime.datetime or str
            Timestamp at which to sample each trajectory, or 'start'/'end' to
            use the trajectory's own start or end time

        Returns
        -------
        GeoDataFrame
            Trajectory locations at timestamp t
        """
        gdf = GeoDataFrame()
        for traj in self:
            if t == 'start':
                x = traj.get_row_at(traj.get_start_time())
            elif t == 'end':
                x = traj.get_row_at(traj.get_end_time())
            else:
                if t < traj.get_start_time() or t > traj.get_end_time():
                    continue
                x = traj.get_row_at(t)
            gdf = gdf.append(x)
        return gdf
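The method accepts either a datetime or the strings 'start'/'end', and trajectories that do not cover the requested timestamp are silently skipped. A hedged usage sketch, assuming traj_collection is an object that exposes the method above (the name is illustrative):

from datetime import datetime

positions = traj_collection.get_locations_at(datetime(2020, 1, 1, 12, 0))
start_positions = traj_collection.get_locations_at('start')
print(len(positions), 'trajectories have a location at that timestamp')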
Example No. 3
def _combine_dfs_osm(types, save_path, bbox):
    """Combine all dataframes from individual features into one GeoDataFrame
    Parameters:
        ..
    Returns:
        (gdf)
    """
    print('Combining all low-value GeoDataFrames into one GeoDataFrame...')
    OSM_features_gdf_combined = \
    GeoDataFrame(pd.DataFrame(columns=['Item', 'Name', 'Type', 'Natural_Type', 'geometry']),
                 crs='epsg:4326', geometry='geometry')
    for item in types:
        print('adding results from %s ...' % item)
        OSM_features_gdf_combined = \
        OSM_features_gdf_combined.append(
            globals()[str(item) + '_gdf_all_' + str(int(bbox[0])) + '_' + str(int(bbox[1]))],
            ignore_index=True)
    # buffer LineString features slightly (0.000045 deg is roughly 5 m) so they become polygons
    for i, geom in enumerate(OSM_features_gdf_combined.geometry):
        if geom.type == 'LineString':
            OSM_features_gdf_combined.loc[i, 'geometry'] = geom.buffer(0.000045)

    OSM_features_gdf_combined.to_file(
        save_path.joinpath('OSM_features_' + str(int(bbox[0])) + '_' +
                           str(int(bbox[1])) + '.shp'))

    return OSM_features_gdf_combined
Example No. 4
    def get_locations_at(self, t):
        """
        Returns GeoDataFrame with trajectory locations at the specified timestamp

        Parameters
        ----------
        t : datetime.datetime or str
            Timestamp at which to sample each trajectory, or 'start'/'end'

        Returns
        -------
        GeoDataFrame
            Trajectory locations at timestamp t
        """
        gdf = GeoDataFrame()
        for traj in self:
            if t == 'start':
                x = traj.get_row_at(traj.get_start_time())
            elif t == 'end':
                x = traj.get_row_at(traj.get_end_time())
            else:
                if t < traj.get_start_time() or t > traj.get_end_time():
                    continue
                x = traj.get_row_at(t)
            gdf = gdf.append(x)
        return gdf
Example No. 5
    def get_path(self, path):
        """Collect the edge rows along a sequence of node ids into a GeoDataFrame.

        Each consecutive node pair is looked up in the edge list; when the pair is
        not found, the reversed pair is tried, so lookups work in both directions.
        """
        edge_path = GeoDataFrame(columns=self.attributes(), crs=self.crs)

        for i in range(len(path) - 1):
            try:
                edge_path = edge_path.append(
                    self._gpd_df.loc[self._from_to.index(
                        (path[i], path[i + 1]))],
                    ignore_index=True)
            except ValueError:
                edge_path = edge_path.append(
                    self._gpd_df.loc[self._from_to.index(
                        (path[i + 1], path[i]))],
                    ignore_index=True)

        return edge_path
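get_path walks consecutive node pairs of the given route and copies the matching edge rows, falling back to the reversed pair when the edge list stores the opposite direction. A hedged usage sketch (the network object and node ids are illustrative, not from the project):

node_sequence = [17, 42, 43, 90]          # e.g. output of a shortest-path routine
edges = network.get_path(node_sequence)   # one row per traversed edge
print(edges.geometry.length.sum())        # total length in the units of the CRS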
Example No. 6
def upload_pano_base(gdf_base: gpd.GeoDataFrame):
    gdf_base.loc[:, "ID"] = gdf_base.index

    db_pano_base = load_postgis('pano_base')
    ori_size, new_size = db_pano_base.shape[0], gdf_base.shape[0]
    tmp = gdf_base.append(db_pano_base).drop_duplicates("ID", keep='first')

    if ori_size == tmp.shape[0]:
        return True

    return gdf_to_postgis(tmp, 'pano_base')
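The helper keeps whichever copy of an ID it sees first, and because gdf_base is placed before the database rows, fresh panoramas win over stored ones; nothing is written back when the deduplicated frame is the same size as what the database already held. A sketch of the same keep-first merge written against pandas >= 2.0 (load_postgis and gdf_to_postgis are the project's own helpers and are not redefined here):

import pandas as pd

def merge_keep_first(fresh, existing, key="ID"):
    # rows from `fresh` win on duplicate keys because they come first in the concat
    return pd.concat([fresh, existing]).drop_duplicates(key, keep="first")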
Example No. 7
    def upload_pano_node(self, gdf: gpd.GeoDataFrame = None):
        # Tested: list-valued columns are automatically serialized on upload
        gdf = gdf if gdf is not None else self.gdf_pano_node
        gdf.loc[:, "ID"] = gdf.index

        db_pano_base = load_postgis(self.pano_node_pg)
        ori_size, new_size = db_pano_base.shape[0], gdf.shape[0]
        tmp = gdf.append(db_pano_base).drop_duplicates("ID", keep='first')

        if ori_size == tmp.shape[0]:
            return True

        return gdf_to_postgis(tmp, self.pano_node_pg)
Example No. 8
class TestPolygonPlotting:
    def setup_method(self):

        t1 = Polygon([(0, 0), (1, 0), (1, 1)])
        t2 = Polygon([(1, 0), (2, 0), (2, 1)])
        self.polys = GeoSeries([t1, t2], index=list('AB'))
        self.df = GeoDataFrame({'geometry': self.polys, 'values': [0, 1]})

        multipoly1 = MultiPolygon([t1, t2])
        multipoly2 = rotate(multipoly1, 180)
        self.df2 = GeoDataFrame({
            'geometry': [multipoly1, multipoly2],
            'values': [0, 1]
        })

        t3 = Polygon([(2, 0), (3, 0), (3, 1)])
        df_nan = GeoDataFrame({'geometry': t3, 'values': [np.nan]})
        self.df3 = self.df.append(df_nan)

    def test_single_color(self):

        ax = self.polys.plot(color='green')
        _check_colors(2, ax.collections[0].get_facecolors(), ['green'] * 2)
        # color only sets facecolor
        _check_colors(2, ax.collections[0].get_edgecolors(), ['k'] * 2)

        ax = self.df.plot(color='green')
        _check_colors(2, ax.collections[0].get_facecolors(), ['green'] * 2)
        _check_colors(2, ax.collections[0].get_edgecolors(), ['k'] * 2)

        with warnings.catch_warnings(record=True) as _:  # don't print warning
            # 'color' overrides 'values'
            ax = self.df.plot(column='values', color='green')
            _check_colors(2, ax.collections[0].get_facecolors(), ['green'] * 2)

    def test_vmin_vmax(self):
        # when vmin == vmax, all polygons should be the same color

        # non-categorical
        ax = self.df.plot(column='values', categorical=False, vmin=0, vmax=0)
        actual_colors = ax.collections[0].get_facecolors()
        np.testing.assert_array_equal(actual_colors[0], actual_colors[1])

        # categorical
        ax = self.df.plot(column='values', categorical=True, vmin=0, vmax=0)
        actual_colors = ax.collections[0].get_facecolors()
        np.testing.assert_array_equal(actual_colors[0], actual_colors[1])

        # vmin vmax set correctly for array with NaN (GitHub issue 877)
        ax = self.df3.plot(column='values')
        actual_colors = ax.collections[0].get_facecolors()
        assert np.any(np.not_equal(actual_colors[0], actual_colors[1]))

    def test_style_kwargs(self):

        # facecolor overrides default cmap when color is not set
        ax = self.polys.plot(facecolor='k')
        _check_colors(2, ax.collections[0].get_facecolors(), ['k'] * 2)

        # facecolor overrides more general-purpose color when both are set
        ax = self.polys.plot(color='red', facecolor='k')
        # TODO with new implementation, color overrides facecolor
        # _check_colors(2, ax.collections[0], ['k']*2, alpha=0.5)

        # edgecolor
        ax = self.polys.plot(edgecolor='red')
        np.testing.assert_array_equal([(1, 0, 0, 1)],
                                      ax.collections[0].get_edgecolors())

        ax = self.df.plot('values', edgecolor='red')
        np.testing.assert_array_equal([(1, 0, 0, 1)],
                                      ax.collections[0].get_edgecolors())

        # alpha sets both edge and face
        ax = self.polys.plot(facecolor='g', edgecolor='r', alpha=0.4)
        _check_colors(2,
                      ax.collections[0].get_facecolors(), ['g'] * 2,
                      alpha=0.4)
        _check_colors(2,
                      ax.collections[0].get_edgecolors(), ['r'] * 2,
                      alpha=0.4)

    def test_legend_kwargs(self):

        ax = self.df.plot(column='values',
                          categorical=True,
                          legend=True,
                          legend_kwds={'frameon': False})
        assert ax.get_legend().get_frame_on() is False

    def test_multipolygons(self):

        # MultiPolygons
        ax = self.df2.plot()
        assert len(ax.collections[0].get_paths()) == 4
        _check_colors(4, ax.collections[0].get_facecolors(),
                      [MPL_DFT_COLOR] * 4)

        ax = self.df2.plot('values')
        cmap = plt.get_cmap(lut=2)
        # colors are repeated for all components within a MultiPolygon
        expected_colors = [cmap(0), cmap(0), cmap(1), cmap(1)]
        _check_colors(4, ax.collections[0].get_facecolors(), expected_colors)
Example No. 9
# -*- coding: utf-8 -*-

from indalsig.labs.callejero.lab03 import show_ways_with_postcodes
from geopandas import GeoDataFrame
from shapely.geometry import MultiPolygon, Polygon

# Create the GeoDataFrame and define the geometry and postcode columns
postcodes_gdf = GeoDataFrame(columns=['geometry', 'postcode'])

# From the previous tutorial, retrieve all the multipolygons from the calculation
for postcode_data in show_ways_with_postcodes.gf_postcode_poly.values:

    postcode = dict()
    postcode['postcode'] = postcode_data[1]

    # Convert polygons to multipolygons and ignore anything that cannot be converted to a multipolygon
    if isinstance(postcode_data[0], Polygon):
        postcode['geometry'] = MultiPolygon([postcode_data[0]])
    elif isinstance(postcode_data[0], MultiPolygon):
        postcode['geometry'] = MultiPolygon(postcode_data[0])
    else:
        print(postcode_data[0])
        continue

    # Append our row to the GeoDataFrame
    postcodes_gdf = postcodes_gdf.append(postcode, ignore_index=True)

# Write the GeoDataFrame to GeoJSON format using the Fiona driver
postcodes_gdf.to_file('postcodes.json', 'GeoJSON')
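The loop normalises every postcode geometry to a MultiPolygon before the GeoJSON export, skipping anything else. A self-contained sketch of that normalisation step on its own (toy geometry, not the tutorial's data):

from shapely.geometry import MultiPolygon, Polygon

def to_multipolygon(geom):
    # wrap bare Polygons, pass MultiPolygons through, reject anything else
    if isinstance(geom, Polygon):
        return MultiPolygon([geom])
    if isinstance(geom, MultiPolygon):
        return geom
    raise TypeError('unsupported geometry type: %s' % geom.geom_type)

print(to_multipolygon(Polygon([(0, 0), (1, 0), (1, 1)])).geom_type)  # MultiPolygon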
Example No. 10
class TestPolygonPlotting:
    def setup_method(self):

        t1 = Polygon([(0, 0), (1, 0), (1, 1)])
        t2 = Polygon([(1, 0), (2, 0), (2, 1)])
        self.polys = GeoSeries([t1, t2], index=list("AB"))
        self.df = GeoDataFrame({"geometry": self.polys, "values": [0, 1]})

        multipoly1 = MultiPolygon([t1, t2])
        multipoly2 = rotate(multipoly1, 180)
        self.df2 = GeoDataFrame({
            "geometry": [multipoly1, multipoly2],
            "values": [0, 1]
        })

        t3 = Polygon([(2, 0), (3, 0), (3, 1)])
        df_nan = GeoDataFrame({"geometry": t3, "values": [np.nan]})
        self.df3 = self.df.append(df_nan)

    def test_single_color(self):

        ax = self.polys.plot(color="green")
        _check_colors(2, ax.collections[0].get_facecolors(), ["green"] * 2)
        # color only sets facecolor
        _check_colors(2, ax.collections[0].get_edgecolors(), ["k"] * 2)

        ax = self.df.plot(color="green")
        _check_colors(2, ax.collections[0].get_facecolors(), ["green"] * 2)
        _check_colors(2, ax.collections[0].get_edgecolors(), ["k"] * 2)

        # check rgba tuple GH1178
        ax = self.df.plot(color=(0.5, 0.5, 0.5))
        _check_colors(2, ax.collections[0].get_facecolors(),
                      [(0.5, 0.5, 0.5)] * 2)
        ax = self.df.plot(color=(0.5, 0.5, 0.5, 0.5))
        _check_colors(2, ax.collections[0].get_facecolors(),
                      [(0.5, 0.5, 0.5, 0.5)] * 2)
        with pytest.raises(TypeError):
            self.df.plot(color="not color")

        with warnings.catch_warnings(record=True) as _:  # don't print warning
            # 'color' overrides 'values'
            ax = self.df.plot(column="values", color="green")
            _check_colors(2, ax.collections[0].get_facecolors(), ["green"] * 2)

    def test_vmin_vmax(self):
        # when vmin == vmax, all polygons should be the same color

        # non-categorical
        ax = self.df.plot(column="values", categorical=False, vmin=0, vmax=0)
        actual_colors = ax.collections[0].get_facecolors()
        np.testing.assert_array_equal(actual_colors[0], actual_colors[1])

        # categorical
        ax = self.df.plot(column="values", categorical=True, vmin=0, vmax=0)
        actual_colors = ax.collections[0].get_facecolors()
        np.testing.assert_array_equal(actual_colors[0], actual_colors[1])

        # vmin vmax set correctly for array with NaN (GitHub issue 877)
        ax = self.df3.plot(column="values")
        actual_colors = ax.collections[0].get_facecolors()
        assert np.any(np.not_equal(actual_colors[0], actual_colors[1]))

    def test_style_kwargs(self):

        # facecolor overrides default cmap when color is not set
        ax = self.polys.plot(facecolor="k")
        _check_colors(2, ax.collections[0].get_facecolors(), ["k"] * 2)

        # facecolor overrides more general-purpose color when both are set
        ax = self.polys.plot(color="red", facecolor="k")
        # TODO with new implementation, color overrides facecolor
        # _check_colors(2, ax.collections[0], ['k']*2, alpha=0.5)

        # edgecolor
        ax = self.polys.plot(edgecolor="red")
        np.testing.assert_array_equal([(1, 0, 0, 1)],
                                      ax.collections[0].get_edgecolors())

        ax = self.df.plot("values", edgecolor="red")
        np.testing.assert_array_equal([(1, 0, 0, 1)],
                                      ax.collections[0].get_edgecolors())

        # alpha sets both edge and face
        ax = self.polys.plot(facecolor="g", edgecolor="r", alpha=0.4)
        _check_colors(2,
                      ax.collections[0].get_facecolors(), ["g"] * 2,
                      alpha=0.4)
        _check_colors(2,
                      ax.collections[0].get_edgecolors(), ["r"] * 2,
                      alpha=0.4)

        # check rgba tuple GH1178 for face and edge
        ax = self.df.plot(facecolor=(0.5, 0.5, 0.5), edgecolor=(0.4, 0.5, 0.6))
        _check_colors(2, ax.collections[0].get_facecolors(),
                      [(0.5, 0.5, 0.5)] * 2)
        _check_colors(2, ax.collections[0].get_edgecolors(),
                      [(0.4, 0.5, 0.6)] * 2)

        ax = self.df.plot(facecolor=(0.5, 0.5, 0.5, 0.5),
                          edgecolor=(0.4, 0.5, 0.6, 0.5))
        _check_colors(2, ax.collections[0].get_facecolors(),
                      [(0.5, 0.5, 0.5, 0.5)] * 2)
        _check_colors(2, ax.collections[0].get_edgecolors(),
                      [(0.4, 0.5, 0.6, 0.5)] * 2)

    def test_legend_kwargs(self):

        ax = self.df.plot(
            column="values",
            categorical=True,
            legend=True,
            legend_kwds={"frameon": False},
        )
        assert ax.get_legend().get_frame_on() is False

    def test_colorbar_kwargs(self):
        # Test if kwargs are passed to colorbar

        label_txt = "colorbar test"

        ax = self.df.plot(
            column="values",
            categorical=False,
            legend=True,
            legend_kwds={"label": label_txt},
        )

        assert ax.get_figure().axes[1].get_ylabel() == label_txt

        ax = self.df.plot(
            column="values",
            categorical=False,
            legend=True,
            legend_kwds={
                "label": label_txt,
                "orientation": "horizontal"
            },
        )

        assert ax.get_figure().axes[1].get_xlabel() == label_txt

    def test_multipolygons(self):

        # MultiPolygons
        ax = self.df2.plot()
        assert len(ax.collections[0].get_paths()) == 4
        _check_colors(4, ax.collections[0].get_facecolors(),
                      [MPL_DFT_COLOR] * 4)

        ax = self.df2.plot("values")
        cmap = plt.get_cmap(lut=2)
        # colors are repeated for all components within a MultiPolygon
        expected_colors = [cmap(0), cmap(0), cmap(1), cmap(1)]
        _check_colors(4, ax.collections[0].get_facecolors(), expected_colors)

        ax = self.df2.plot(color=["r", "b"])
        # colors are repeated for all components within a MultiPolygon
        _check_colors(4, ax.collections[0].get_facecolors(),
                      ["r", "r", "b", "b"])

    def test_subplots_norm(self):
        # colors of subplots are the same as for plot (norm is applied)
        cmap = matplotlib.cm.viridis_r
        norm = matplotlib.colors.Normalize(vmin=0, vmax=10)
        ax = self.df.plot(column="values", cmap=cmap, norm=norm)
        actual_colors_orig = ax.collections[0].get_facecolors()
        exp_colors = cmap(np.arange(2) / (10))
        np.testing.assert_array_equal(exp_colors, actual_colors_orig)
        fig, ax = plt.subplots()
        self.df[1:].plot(column="values", ax=ax, norm=norm, cmap=cmap)
        actual_colors_sub = ax.collections[0].get_facecolors()
        np.testing.assert_array_equal(actual_colors_orig[1],
                                      actual_colors_sub[0])
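test_colorbar_kwargs relies on legend_kwds being forwarded to the colorbar when the legend is continuous. A minimal standalone sketch of that call outside the test fixture (toy polygons, not the fixture's data):

import geopandas as gpd
from shapely.geometry import Polygon

gdf = gpd.GeoDataFrame(
    {"values": [0, 1]},
    geometry=[Polygon([(0, 0), (1, 0), (1, 1)]), Polygon([(1, 0), (2, 0), (2, 1)])],
)
ax = gdf.plot(column="values", legend=True,
              legend_kwds={"label": "value", "orientation": "horizontal"})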
Example No. 11
class OsmParser(object):
    def __init__(self):
        self.polygons = GeoDataFrame(columns=['tag', 'geometry'])
        self.multilinestrings = GeoDataFrame(columns=['tag', 'geometry'])
        self.tag_value = TagValue()
        self.bbox_size = None

    @staticmethod
    def __get_first_point(line):
        coords = mapping(line)['coordinates']
        return coords[0][0] if isinstance(coords[0][0], tuple) else coords[0]

    @staticmethod
    def __get_last_point(line):
        coords = mapping(line)['coordinates']
        return coords[-1][1] if isinstance(coords[0][0], tuple) else coords[1]

    def __dissolve(self, src_roads):
        roads = src_roads.copy()
        current = 1
        points = dict()
        for index, row in roads.iterrows():
            start = self.__get_first_point(row.geometry)
            end = self.__get_last_point(row.geometry)
            for number, borders in points.items():
                if start in borders:
                    roads.at[index, "id"] = number
                    borders.add(end)
                    break
                if end in borders:
                    roads.at[index, "id"] = number
                    borders.add(start)
                    break
            else:
                roads.at[index, "id"] = current
                points[current] = set()
                points[current].add(start)
                points[current].add(end)
                current += 1
        return roads.dissolve(by="id").reset_index().drop(columns=["id"])

    def compute_geometry(self, bbox, filename=None):
        """
        Parse OSM file (area in bbox) to retrieve information about geometry.

        :param Sequence[float] bbox: area to be parsed in format (min_lon, min_lat, max_lon, max_lat)
        :param Optional[str] filename: map file in .osm.pbf format or None (map will be downloaded)
        """
        assert len(bbox) == 4
        self.bbox_size = (fabs(bbox[2] - bbox[0]), fabs(bbox[3] - bbox[1]))

        if filename is None:
            converter = OsmConverter(bbox)
            filename = converter.filename

        osm = OSM(filename, bounding_box=bbox)
        multipolygons = GeoDataFrame(columns=['tag', 'geometry'])

        natural = osm.get_natural()
        if natural is not None:
            natural = natural.loc[:, ['natural', 'geometry']].rename(
                columns={'natural': 'tag'})
            self.polygons = self.polygons.append(
                natural.loc[natural.geometry.type == 'Polygon'])
            multipolygons = multipolygons.append(
                natural.loc[natural.geometry.type == 'MultiPolygon'])
            natural.drop(natural.index, inplace=True)

        landuse = osm.get_landuse()
        if landuse is not None:
            landuse = landuse.loc[:, ['landuse', 'geometry']].rename(
                columns={'landuse': 'tag'})
            self.polygons = self.polygons.append(
                landuse.loc[landuse.geometry.type == 'Polygon'])
            multipolygons = multipolygons.append(
                landuse.loc[landuse.geometry.type == 'MultiPolygon'])
            landuse.drop(landuse.index, inplace=True)

        # splitting multipolygons to polygons
        for i in range(multipolygons.shape[0]):
            tag = multipolygons.tag.iloc[i]
            for polygon in multipolygons.geometry.iloc[i].geoms:
                self.polygons = self.polygons.append(
                    {
                        'tag': tag,
                        'geometry': polygon
                    }, ignore_index=True)

        roads = osm.get_network()
        if roads is not None:
            roads = self.__dissolve(roads[["highway", "geometry"]])
            self.multilinestrings = GeoDataFrame(
                roads.loc[roads.geometry.type == 'MultiLineString']).rename(
                    columns={'highway': 'tag'})

        self.tag_value.eval(self.polygons, self.multilinestrings, "tag")
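__dissolve assigns the same id to road segments that share an endpoint and then lets GeoDataFrame.dissolve merge each group into a single feature. A tiny illustration of that final dissolve step with hand-assigned ids (toy coordinates, not the parser's data):

from geopandas import GeoDataFrame
from shapely.geometry import LineString

roads = GeoDataFrame({
    "highway": ["residential"] * 3,
    "geometry": [
        LineString([(0, 0), (1, 0)]),   # shares (1, 0) with the next segment
        LineString([(1, 0), (1, 1)]),
        LineString([(5, 5), (6, 5)]),   # disconnected from the others
    ],
})
roads["id"] = [1, 1, 2]                 # ids as the endpoint matching would assign them
dissolved = roads.dissolve(by="id").reset_index().drop(columns=["id"])
print(len(dissolved))                   # 2: one merged group, one lone segment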
Example No. 12
    def append_raw_overhanging_PV_installations_to_intersected_installations(
        self,
        raw_overhanging_PV_installations: gpd.GeoDataFrame = None,
        raw_PV_installations_on_rooftop: gpd.GeoDataFrame = None,
    ) -> gpd.GeoDataFrame:
        """
        PV polygons which do not intersect with a rooftop polygon, although they do border to a rooftop, are matched to
        their nearest rooftop geometry and appended to the GeoDataFrame listing all rooftop PV polygons

        Parameters
        ----------
        raw_overhanging_PV_installations: GeoPandas.GeoDataFrame
            GeoDataFrame which specifies all the PV polygons which border to a rooftop, but are not intersected with
            a rooftop geometry
        raw_PV_installations_on_rooftop: GeoPandas.GeoDataFrame
            GeoDataFrame which specifies all the PV polygons which are intersected with a rooftop geometry

        Returns
        -------
        GeoPandas.GeoDataFrame
            GeoDataFrame where overhanging PV installations have been enriched with the attributes of the closest
            rooftop and appended to raw_PV_installations_on_rooftop
        """

        # IMPORTANT: if ckdnearest is used always reset_index before
        raw_overhanging_PV_installations = raw_overhanging_PV_installations.reset_index(
            drop=True)

        raw_overhanging_PV_installations.rename(
            columns={"identifier": "identifier_diff"}, inplace=True)

        # Extract centroid from intersected PV polygons while preserving their polygon geometry
        raw_PV_installations_on_rooftop[
            "geometry_intersected_polygon"] = raw_PV_installations_on_rooftop[
                "geometry"]
        raw_PV_installations_on_rooftop[
            "geometry"] = raw_PV_installations_on_rooftop["geometry"].centroid
        raw_PV_installations_on_rooftop[
            "centroid_intersect"] = raw_PV_installations_on_rooftop["geometry"]

        raw_overhanging_pv_installations_enriched_with_closest_rooftop_data = self.enrich_raw_overhanging_pv_installations_with_closest_rooftop_attributes(
            raw_overhanging_PV_installations, raw_PV_installations_on_rooftop)

        raw_PV_installations_on_rooftop.geometry = (
            raw_PV_installations_on_rooftop.geometry_intersected_polygon)

        raw_PV_installations_on_rooftop = raw_PV_installations_on_rooftop[[
            "raw_area",
            "identifier",
            "Area",
            "Azimuth",
            "Building_I",
            "City",
            "PostalCode",
            "RoofTopID",
            "RooftopTyp",
            "Street",
            "StreetNumb",
            "Tilt",
            "area_inter",
            "geometry",
        ]]

        # Append the dataframe of all raw overhanging PV installations, enriched with the
        # rooftop attributes of their nearest rooftop, to the dataframe of all intersected PV installations
        # Note 1: Attributes starting with capital letters specify rooftop attributes.
        # Note 2: The geometry of the overhanging PV installations is not yet dissolved with the geometry of the
        # intersected PV installations
        raw_PV_installations_on_rooftop = gpd.GeoDataFrame(
            raw_PV_installations_on_rooftop.append(
                raw_overhanging_pv_installations_enriched_with_closest_rooftop_data
            )).reset_index(drop=True)

        return raw_PV_installations_on_rooftop
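The enrichment step matches each overhanging PV polygon to the nearest rooftop centroid (hence the reset_index note above). A generic, hedged sketch of such a nearest-centroid lookup; this is not the project's ckdnearest helper, just the usual KD-tree approach:

import numpy as np
from scipy.spatial import cKDTree

def nearest_indices(left_gdf, right_gdf):
    # positional index of the nearest right-hand centroid for every left-hand centroid
    left_xy = np.array([(p.x, p.y) for p in left_gdf.geometry.centroid])
    right_xy = np.array([(p.x, p.y) for p in right_gdf.geometry.centroid])
    _, idx = cKDTree(right_xy).query(left_xy, k=1)
    return idx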
Example No. 13
def main():
    parser = argparse.ArgumentParser(
        'Plots shore lines using a perspective projection')
    parser.add_argument('--image',
                        help="image on top of which the map is projected")
    parser.add_argument('--fov', type=float, help="fov in width")
    parser.add_argument('--width', type=int, help="image width in pixels")
    parser.add_argument('--height', type=int, help="image height in pixels")
    parser.add_argument('--lat', type=float, help="satellite latitude")
    parser.add_argument('--lon', type=float, help="satellite longitude")
    parser.add_argument('--alt', type=float, help="satellite altitude")
    parser.add_argument('--head',
                        type=float,
                        help="satellite heading: 0=north, 90=east, 180=south")
    parser.add_argument('--tilt',
                        type=float,
                        help="satellite tilt: 0=down, 90=flight direction")
    parser.add_argument(
        '--roll',
        type=float,
        help=
        "satellite roll: 0 = \"up\" towards north (?), 90 = \"up\" towards east (?)"
    )
    args = parser.parse_args()

    # 4326 is same as cartopy.crs.PlateCarree()
    crs4326 = CRS.from_epsg(4326)
    bbox = (args.lon - 45, args.lat - 20, args.lon + 45,
            args.lat + 20) if False else None
    sc = '10m'
    types = OrderedDict()

    if 0:
        # from open street maps
        coast = gp.read_file(
            'zip://./coastlines-split-4326.zip!coastlines-split-4326/lines.shp',
            bbox=bbox)
        coast.set_crs(crs4326)
    else:
        cst = cartopy.feature.NaturalEarthFeature(category='physical',
                                                  name='coastline',
                                                  scale=sc)
        coast = GeoDataFrame(geometry=list(cst.geometries()), crs=crs4326)
    coast['type'] = ['coast'] * len(coast)
    types['coast'] = dict(facecolor='none',
                          edgecolor='black',
                          linewidth=1,
                          alpha=0.5)

    if 0:
        # from https://gadm.org/download_country_v3.html
        borders = gp.read_file('zip://./gadm36_FIN_shp.zip!gadm36_FIN_0.shp',
                               bbox=bbox)
        borders.set_crs(crs4326)
    else:
        brds = cartopy.feature.NaturalEarthFeature(
            category='cultural', name='admin_0_boundary_lines_land', scale=sc)
        borders = GeoDataFrame(geometry=list(brds.geometries()), crs=crs4326)
    borders['type'] = ['border'] * len(borders)
    types['border'] = dict(facecolor='none',
                           edgecolor='black',
                           linewidth=1,
                           alpha=0.5)

    # TODO: add horizon
    # TODO: add cities

    feats = coast.append(borders, ignore_index=True)

    if 0:
        # magnetic declination (9.67 deg east) using this: https://www.ngdc.noaa.gov/geomag/calculators/magcalc.shtml
        slice = get_meas_slice((78.148, 16.043, 520),
                               9.67, (150, 175),
                               crs4326,
                               dist=3e6)  # (150, 170)
        slice['type'] = ['slice'] * len(slice)
        types['slice'] = dict(facecolor='none',
                              edgecolor='white',
                              linestyle='--',
                              linewidth=1,
                              alpha=0.5)
        feats = feats.append(slice, ignore_index=True)

    perspective_projection(feats, (args.lat, args.lon, args.alt),
                           (args.head, args.tilt, args.roll + 180), args.fov,
                           args.width, args.height)

    if args.image:
        img = plt.imread(args.image)
        ax = plt.imshow(img).axes
    else:
        fig, ax = plt.subplots()

    for type, styling in types.items():
        feats.loc[feats['type'] == type].plot(ax=ax, **styling)
    ax.set_xlim(0, args.width)
    ax.set_ylim(args.height, 0)
    plt.show()
    print('finished')
Example No. 14
class TestPolygonPlotting:

    def setup_method(self):

        t1 = Polygon([(0, 0), (1, 0), (1, 1)])
        t2 = Polygon([(1, 0), (2, 0), (2, 1)])
        self.polys = GeoSeries([t1, t2], index=list('AB'))
        self.df = GeoDataFrame({'geometry': self.polys, 'values': [0, 1]})

        multipoly1 = MultiPolygon([t1, t2])
        multipoly2 = rotate(multipoly1, 180)
        self.df2 = GeoDataFrame({'geometry': [multipoly1, multipoly2],
                                 'values': [0, 1]})

        t3 = Polygon([(2, 0), (3, 0), (3, 1)])
        df_nan = GeoDataFrame({'geometry': t3, 'values': [np.nan]})
        self.df3 = self.df.append(df_nan)

    def test_single_color(self):

        ax = self.polys.plot(color='green')
        _check_colors(2, ax.collections[0].get_facecolors(), ['green']*2)
        # color only sets facecolor
        _check_colors(2, ax.collections[0].get_edgecolors(), ['k'] * 2)

        ax = self.df.plot(color='green')
        _check_colors(2, ax.collections[0].get_facecolors(), ['green']*2)
        _check_colors(2, ax.collections[0].get_edgecolors(), ['k'] * 2)

        with warnings.catch_warnings(record=True) as _:  # don't print warning
            # 'color' overrides 'values'
            ax = self.df.plot(column='values', color='green')
            _check_colors(2, ax.collections[0].get_facecolors(), ['green']*2)

    def test_vmin_vmax(self):
        # when vmin == vmax, all polygons should be the same color

        # non-categorical
        ax = self.df.plot(column='values', categorical=False, vmin=0, vmax=0)
        actual_colors = ax.collections[0].get_facecolors()
        np.testing.assert_array_equal(actual_colors[0], actual_colors[1])

        # categorical
        ax = self.df.plot(column='values', categorical=True, vmin=0, vmax=0)
        actual_colors = ax.collections[0].get_facecolors()
        np.testing.assert_array_equal(actual_colors[0], actual_colors[1])

        # vmin vmax set correctly for array with NaN (GitHub issue 877)
        ax = self.df3.plot(column='values')
        actual_colors = ax.collections[0].get_facecolors()
        assert np.any(np.not_equal(actual_colors[0], actual_colors[1]))


    def test_style_kwargs(self):

        # facecolor overrides default cmap when color is not set
        ax = self.polys.plot(facecolor='k')
        _check_colors(2, ax.collections[0].get_facecolors(), ['k']*2)

        # facecolor overrides more general-purpose color when both are set
        ax = self.polys.plot(color='red', facecolor='k')
        # TODO with new implementation, color overrides facecolor
        # _check_colors(2, ax.collections[0], ['k']*2, alpha=0.5)

        # edgecolor
        ax = self.polys.plot(edgecolor='red')
        np.testing.assert_array_equal([(1, 0, 0, 1)],
                                      ax.collections[0].get_edgecolors())

        ax = self.df.plot('values', edgecolor='red')
        np.testing.assert_array_equal([(1, 0, 0, 1)],
                                      ax.collections[0].get_edgecolors())

        # alpha sets both edge and face
        ax = self.polys.plot(facecolor='g', edgecolor='r', alpha=0.4)
        _check_colors(2, ax.collections[0].get_facecolors(), ['g'] * 2, alpha=0.4)
        _check_colors(2, ax.collections[0].get_edgecolors(), ['r'] * 2, alpha=0.4)

    def test_legend_kwargs(self):

        ax = self.df.plot(column='values', categorical=True, legend=True,
                          legend_kwds={'frameon': False})
        assert ax.get_legend().get_frame_on() is False

    def test_multipolygons(self):

        # MultiPolygons
        ax = self.df2.plot()
        assert len(ax.collections[0].get_paths()) == 4
        _check_colors(4, ax.collections[0].get_facecolors(), [MPL_DFT_COLOR]*4)

        ax = self.df2.plot('values')
        cmap = plt.get_cmap(lut=2)
        # colors are repeated for all components within a MultiPolygon
        expected_colors = [cmap(0), cmap(0), cmap(1), cmap(1)]
        _check_colors(4, ax.collections[0].get_facecolors(), expected_colors)
Example No. 15
postcodes_arr = gf.postcode.unique()
postcodes_arr.sort()

# Create the GeoDataFrame with the postcode polygons
gf_postcode_poly = GeoDataFrame(columns=['geometry', 'postcode'])
for postcode in postcodes_arr:

    points = []
    for point in gf.loc[gf.postcode == postcode].geometry:
        points.append(point)

    if ALGORITHM == 'ALPHA':
        alpha_geometry, edge_points = alpha_shape.alpha_shape(points, 1000)
        gf_postcode_poly = gf_postcode_poly.append(
            {
                'geometry': alpha_geometry,
                'postcode': postcode
            },
            ignore_index=True)
    elif ALGORITHM == 'CONVEX':
        multipoint = MultiPoint(points)
        gf_postcode_poly = gf_postcode_poly.append(
            {
                'geometry': multipoint.convex_hull,
                'postcode': postcode
            },
            ignore_index=True)

# Series of colours, one for each postcode
colors = sns.hls_palette(len(postcodes_arr))
colormap = ListedColormap(colors)
base = gs.plot(color='blue')
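The CONVEX branch needs only Shapely: the convex hull of a postcode's points becomes its polygon, whereas the ALPHA branch delegates to the separate alpha_shape module. A minimal self-contained sketch of the convex-hull branch (toy coordinates):

from shapely.geometry import MultiPoint

points = [(0, 0), (1, 0), (1, 1), (0.2, 0.4)]
hull = MultiPoint(points).convex_hull   # Polygon covering all of the postcode's points
print(hull.geom_type)                   # Polygon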
Example No. 16
class TestSpatialJoinNYBB:
    def setup_method(self):
        nybb_filename = geopandas.datasets.get_path("nybb")
        self.polydf = read_file(nybb_filename)
        self.crs = self.polydf.crs
        N = 20
        b = [int(x) for x in self.polydf.total_bounds]
        self.pointdf = GeoDataFrame(
            [
                {"geometry": Point(x, y), "pointattr1": x + y, "pointattr2": x - y}
                for x, y in zip(
                    range(b[0], b[2], int((b[2] - b[0]) / N)),
                    range(b[1], b[3], int((b[3] - b[1]) / N)),
                )
            ],
            crs=self.crs,
        )

    def test_geometry_name(self):
        # test sjoin is working with other geometry name
        polydf_original_geom_name = self.polydf.geometry.name
        self.polydf = self.polydf.rename(columns={"geometry": "new_geom"}).set_geometry(
            "new_geom"
        )
        assert polydf_original_geom_name != self.polydf.geometry.name
        res = sjoin(self.polydf, self.pointdf, how="left")
        assert self.polydf.geometry.name == res.geometry.name

    def test_sjoin_left(self):
        df = sjoin(self.pointdf, self.polydf, how="left")
        assert df.shape == (21, 8)
        for i, row in df.iterrows():
            assert row.geometry.type == "Point"
        assert "pointattr1" in df.columns
        assert "BoroCode" in df.columns

    def test_sjoin_right(self):
        # the inverse of left
        df = sjoin(self.pointdf, self.polydf, how="right")
        df2 = sjoin(self.polydf, self.pointdf, how="left")
        assert df.shape == (12, 8)
        assert df.shape == df2.shape
        for i, row in df.iterrows():
            assert row.geometry.type == "MultiPolygon"
        for i, row in df2.iterrows():
            assert row.geometry.type == "MultiPolygon"

    def test_sjoin_inner(self):
        df = sjoin(self.pointdf, self.polydf, how="inner")
        assert df.shape == (11, 8)

    def test_sjoin_op(self):
        # points within polygons
        df = sjoin(self.pointdf, self.polydf, how="left", op="within")
        assert df.shape == (21, 8)
        assert df.loc[1]["BoroName"] == "Staten Island"

        # points contain polygons? never happens so we should have nulls
        df = sjoin(self.pointdf, self.polydf, how="left", op="contains")
        assert df.shape == (21, 8)
        assert np.isnan(df.loc[1]["Shape_Area"])

    def test_sjoin_bad_op(self):
        # AttributeError: 'Point' object has no attribute 'spandex'
        with pytest.raises(ValueError):
            sjoin(self.pointdf, self.polydf, how="left", op="spandex")

    def test_sjoin_duplicate_column_name(self):
        pointdf2 = self.pointdf.rename(columns={"pointattr1": "Shape_Area"})
        df = sjoin(pointdf2, self.polydf, how="left")
        assert "Shape_Area_left" in df.columns
        assert "Shape_Area_right" in df.columns

    @pytest.mark.parametrize("how", ["left", "right", "inner"])
    def test_sjoin_named_index(self, how):
        # original index names should be unchanged
        pointdf2 = self.pointdf.copy()
        pointdf2.index.name = "pointid"
        polydf = self.polydf.copy()
        polydf.index.name = "polyid"

        res = sjoin(pointdf2, polydf, how=how)
        assert pointdf2.index.name == "pointid"
        assert polydf.index.name == "polyid"

        # original index name should pass through to result
        if how == "right":
            assert res.index.name == "polyid"
        else:  # how == "left", how == "inner"
            assert res.index.name == "pointid"

    def test_sjoin_values(self):
        # GH190
        self.polydf.index = [1, 3, 4, 5, 6]
        df = sjoin(self.pointdf, self.polydf, how="left")
        assert df.shape == (21, 8)
        df = sjoin(self.polydf, self.pointdf, how="left")
        assert df.shape == (12, 8)

    @pytest.mark.xfail
    def test_no_overlapping_geometry(self):
        # Note: these tests are for correctly returning GeoDataFrame
        # when result of the join is empty

        df_inner = sjoin(self.pointdf.iloc[17:], self.polydf, how="inner")
        df_left = sjoin(self.pointdf.iloc[17:], self.polydf, how="left")
        df_right = sjoin(self.pointdf.iloc[17:], self.polydf, how="right")

        expected_inner_df = pd.concat(
            [
                self.pointdf.iloc[:0],
                pd.Series(name="index_right", dtype="int64"),
                self.polydf.drop("geometry", axis=1).iloc[:0],
            ],
            axis=1,
        )

        expected_inner = GeoDataFrame(expected_inner_df, crs="epsg:4326")

        expected_right_df = pd.concat(
            [
                self.pointdf.drop("geometry", axis=1).iloc[:0],
                pd.concat(
                    [
                        pd.Series(name="index_left", dtype="int64"),
                        pd.Series(name="index_right", dtype="int64"),
                    ],
                    axis=1,
                ),
                self.polydf,
            ],
            axis=1,
        )

        expected_right = GeoDataFrame(expected_right_df, crs="epsg:4326").set_index(
            "index_right"
        )

        expected_left_df = pd.concat(
            [
                self.pointdf.iloc[17:],
                pd.Series(name="index_right", dtype="int64"),
                self.polydf.iloc[:0].drop("geometry", axis=1),
            ],
            axis=1,
        )

        expected_left = GeoDataFrame(expected_left_df, crs="epsg:4326")

        assert expected_inner.equals(df_inner)
        assert expected_right.equals(df_right)
        assert expected_left.equals(df_left)

    @pytest.mark.skip("Not implemented")
    def test_sjoin_outer(self):
        df = sjoin(self.pointdf, self.polydf, how="outer")
        assert df.shape == (21, 8)

    def test_sjoin_empty_geometries(self):
        # https://github.com/geopandas/geopandas/issues/944
        empty = GeoDataFrame(geometry=[GeometryCollection()] * 3)
        df = sjoin(self.pointdf.append(empty), self.polydf, how="left")
        assert df.shape == (24, 8)
        df2 = sjoin(self.pointdf, self.polydf.append(empty), how="left")
        assert df2.shape == (21, 8)
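Recent GeoPandas releases renamed the op= keyword exercised by test_sjoin_op and test_sjoin_bad_op to predicate=, and sjoin is also available as a GeoDataFrame method. A minimal sketch of the equivalent call on geopandas >= 0.10 (toy frames, not the nybb fixture):

import geopandas as gpd
from shapely.geometry import Point, Polygon

polys = gpd.GeoDataFrame({"name": ["a"]},
                         geometry=[Polygon([(0, 0), (2, 0), (2, 2), (0, 2)])])
points = gpd.GeoDataFrame(geometry=[Point(1, 1), Point(5, 5)])

# equivalent of sjoin(points, polys, how="left", op="within")
joined = points.sjoin(polys, how="left", predicate="within")
print(joined["name"].tolist())   # ['a', nan]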
Example No. 17
        vertices = DataFrame(vertices).drop_duplicates().values
    x = np.mean(vertices[:,0])
    new['x'] = x
    y = np.mean(vertices[:,1])
    new['y'] = y

    new_poly = MultiPolygon()
    for poly in concat['region']:
        new_poly = new_poly.union(poly)
    new['region'] = [new_poly]

    new['v_area'] = new['region'][0].area
    new['v_larea'] = np.log(new['v_area'])

    new = DataFrame(new)
    stops = stops.append(new)

print "OK"

print 'calculating connectedness...'

print '    building representation of subway system...'
# # 2.3 calculate connectedness
# # 2.3.1 generate unique routes
trips = read_csv('data/indata/google_transit/stop_times.txt')
ids = trips['trip_id'].unique()

# find weekday ('WKD') trips
starts = trips[trips['stop_sequence']==1]
wkd = ['WKD' in i for i in starts['trip_id']]
wkd_starts = starts[wkd]
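The weekday filter above builds a plain Python list of booleans from the trip ids; the same selection is usually written with the vectorised string accessor. A short sketch, assuming the same stop_times columns:

# equivalent weekday selection using pandas string methods
starts = trips[trips['stop_sequence'] == 1]
wkd_starts = starts[starts['trip_id'].str.contains('WKD')]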