Example #1
    def write_data(self, driver):
        filename = DRIVER_FILENAME[driver]
        temp_dir = tempfile.mkdtemp()
        path = os.path.join(temp_dir, filename)
        schema = {
            "geometry": GEOMETRY_TYPE,
            "properties": {
                "date": "date",
            }
        }
        records = [
            {
                "geometry": GEOMETRY_EXAMPLE,
                "properties": {
                    "date": DATE_EXAMPLE,
                }
            },
            {
                "geometry": GEOMETRY_EXAMPLE,
                "properties": {
                    "date": None,
                }
            },
        ]
        with fiona.Env(), fiona.open(path, "w", driver=driver,
                                     schema=schema) as collection:
            collection.writerecords(records)

        with fiona.Env(), fiona.open(path, "r") as collection:
            schema = collection.schema
            features = list(collection)

        shutil.rmtree(temp_dir)

        return schema, features
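
The module-level constants used above (DRIVER_FILENAME, GEOMETRY_TYPE, GEOMETRY_EXAMPLE, DATE_EXAMPLE) are not part of the snippet. A minimal sketch of plausible fixture values, assuming a point geometry and GeoJSON/Shapefile drivers (these exact values are an assumption, not taken from the source):

# Hypothetical fixture values for the constants referenced in write_data().
DRIVER_FILENAME = {"GeoJSON": "test.geojson", "ESRI Shapefile": "test.shp"}
GEOMETRY_TYPE = "Point"
GEOMETRY_EXAMPLE = {"type": "Point", "coordinates": (0.0, 0.0)}
DATE_EXAMPLE = "2020-01-01"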
Example #2
def main():
    logging.basicConfig(
        stream=sys.stdout,
        level=logging.INFO,
        format='%(asctime)-15s %(name)s %(levelname)-8s %(message)s')

    parser = argparse.ArgumentParser(
        description='Import species range polygons into TSX database')
    parser.add_argument('dir',
                        type=str,
                        help='Directory containing species range shapefiles')
    args = parser.parse_args()

    session = get_session()

    filenames = [f for f in os.listdir(args.dir) if f.endswith('.shp')]

    for filename in tqdm(filenames):
        spno = int(filename[0:-4])
        try:
            # https://pyproj4.github.io/pyproj/stable/crs_compatibility.html#fiona
            with fiona.Env(OSR_WKT_FORMAT="WKT2_2018"), fiona.open(
                    os.path.join(args.dir,
                                 filename), encoding='Windows-1252') as shp:
                process_shp(session, spno, shp)
        except KeyboardInterrupt:
            log.info("Aborting - no changes saved")
            return

    session.commit()
Example #3
def import_with_fiona(fpath, source):
    """
    Use fiona to import a parcel file.

    Return a list of dict objects containing WKT-formatted geometries in
    addition to any metadata.
    """
    shapes = []

    try:
        with fiona.Env():
            data = fiona.open(fpath)
            for obj in data:
                try:
                    shape = scrape_fiona_metadata(obj, source)
                    geom = to_shapely_obj(obj)
                    if geom:
                        shape['geom'] = dumps(geom)
                        shapes.append(shape)
                except Exception as e:
                    _L.warning('error loading shape from fiona. {}'.format(e))
    except Exception as e:
        _L.warning('error importing file. {}'.format(e))

    return shapes
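
A minimal usage sketch for import_with_fiona; the file name and source label are placeholders, and scrape_fiona_metadata, to_shapely_obj, dumps and _L are assumed to be defined in the surrounding module:

if __name__ == "__main__":
    parcels = import_with_fiona("parcels.shp", source="county-assessor")
    print(len(parcels), "shapes imported")
    if parcels:
        print(parcels[0]["geom"][:60])  # WKT text produced by shapely.wkt.dumps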
Example #4
def to_kml(self, kml_file):
    """
    write a kml file with site location

    :param kml_file: output kml file
    :return: self
    """
    # adapted from https://gist.github.com/mazzma12/0a32ce693bb42b742252caabb98519db

    import fiona
    import geopandas as gpd
    from shapely.geometry import Point

    # Enable fiona driver
    gpd.io.file.fiona.drvsupport.supported_drivers['KML'] = 'rw'

    # get coordinates from ts
    llong = []
    llat = []
    lcode = []
    lPoint = []

    for code in sorted(self.lcode()):
        lcode.append(code)
        llong.append(self.__dict__[code].lon)
        llat.append(self.__dict__[code].lat)
        lPoint.append(Point(self.__dict__[code].lon, self.__dict__[code].lat))
    d = {'NAME': lcode, 'geometry': lPoint}
    gdf = gpd.GeoDataFrame(d, crs="EPSG:4326")

    # Write file
    with fiona.Env():
        # Might throw a WARNING - CPLE_NotSupported in b'dataset sample_out.kml does not support layer creation option ENCODING'
        gdf.to_file(kml_file, driver='KML')
Example #5
    def reproject(self, new_crs):
        with fiona.Env():
            if new_crs == self.crs:
                return self
            else:
                new_shape = transform(self._shape, self._crs, new_crs)
                return self.__class__(new_shape, new_crs)
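
The reproject() method relies on a transform() helper that is not shown. One possible implementation, assuming CRS values that pyproj can parse (for example "EPSG:4326"):

import pyproj
from shapely.ops import transform as shapely_transform

def transform(shape, src_crs, dst_crs):
    # Build a coordinate transformer and apply it to every vertex of the geometry.
    project = pyproj.Transformer.from_crs(src_crs, dst_crs, always_xy=True).transform
    return shapely_transform(project, shape)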
Example #6
    def regional_cfs(self):
        positive_water_flows = list(self._water_flows(WATER_RESOURCE))
        negative_water_flows = list(self._water_flows(WATER_RELEASE))

        for obj in self.global_cfs():
            yield obj

        with fiona.Env():
            with fiona.open(self.vector_ds) as src:
                for feat in src:
                    for key in positive_water_flows:
                        if not isinstance(feat["properties"][self.column],
                                          Number):
                            continue
                        yield (
                            key,
                            feat["properties"][self.column],
                            (self.geocollection,
                             feat["properties"][self.id_column]),
                        )

                    for key in negative_water_flows:
                        if not isinstance(feat["properties"][self.column],
                                          Number):
                            continue
                        yield (
                            key,
                            feat["properties"][self.column] * -1,
                            (self.geocollection,
                             feat["properties"][self.id_column]),
                        )
Example #7
def getGeom(inputfn, epsg):
    """Retourne:
       - géometrie de inputfn
       - booleen à True si toutes les geom sont valides
       - l'epsg source s'il a pu être détecté
    """
    print("Récupération des géometries")
    polygonGeoms = []
    all_valid = True
    with fiona.Env():
        with fiona.open(inputfn) as source:
            if len(source.crs) > 0:
                source_epsg = source.crs
            elif epsg is not None:
                source_epsg = {'init': 'epsg:' + epsg}
            else:
                print(
                    "No .prj file detected and/or no source projection passed with -epsg"
                )
                sys.exit(1)

            if source_epsg['init'] == 'epsg:4326':
                print("Le SHP d'origne doit être dans une projection en mètre")
                print("détecté:", source_epsg)
                sys.exit(1)

            items = source.items()
            for key, value in items:
                geom = shape(value["geometry"])
                if not geom.is_valid:
                    all_valid = False
                polygonGeoms.append(geom)

            return polygonGeoms, all_valid, source_epsg
Example #8
    def export_stations(self, crs='EPSG:4326'):

        gdf = geopandas.GeoDataFrame(self.StationModel._data,
                                     geometry=geopandas.points_from_xy(
                                         self.StationModel._data.longitude,
                                         self.StationModel._data.latitude),
                                     crs=crs)

        savename, svext = QFileDialog.getSaveFileName(
            self,
            'Save File',
            filter=
            "Shapefile (*.shp);;Google Earth (*.kml);;Geopackage (*.gpkg);;GeoJSON (*.geojson)"
        )
        print(savename, svext)
        if svext == "Shapefile (*.shp)":
            gdf.to_file(savename, driver="ESRI Shapefile")
        elif svext == "Google Earth (*.kml)":
            # Write file
            with fiona.Env():
                # Might throw a WARNING - CPLE_NotSupported in b'dataset sample_out.kml does not support layer creation option ENCODING'
                gdf.to_file(savename, driver='KML')
        elif svext == "Geopackage (*.gpkg)":
            gdf.to_file(savename, layer='stations', driver="GPKG")
        elif svext == "GeoJSON (*.geojson)":
            gdf.to_file(savename, driver='GeoJSON')
Example #9
def add_asdf_id(path):
    """Adds unique id field (asdf_id) and outputs geojson

    serves as shp to geojson converter as well
    also sets permissions for files
    """
    with fiona.Env(OGR_GEOJSON_MAX_OBJ_SIZE="5000MB"):
        geo_df = gpd.GeoDataFrame.from_file(path)

    is_invalid = ~geo_df.is_valid
    geo_df.loc[is_invalid, 'geometry'] = geo_df.loc[is_invalid].buffer(0)

    if sum(~geo_df.is_valid) != 0:
        raise Exception('Invalid geometry could not be corrected')

    geo_df["asdf_id"] = range(len(geo_df))
    geo_df["gqid"] = range(len(geo_df))

    geo_json = geo_df.to_json()
    geo_path = os.path.splitext(path)[0] + ".geojson.tmp"
    with open(geo_path, "w") as geo_file:
        json.dump(json.loads(geo_json), geo_file)
    os.chmod(geo_path, 0o664)
    os.rename(geo_path, geo_path[:-4])

    # create simplified geojson for use with leaflet web map
    geo_df['geometry'] = geo_df['geometry'].simplify(0.01)
    simple_geo_path = os.path.dirname(path) + "/simplified.geojson.tmp"
    with open(simple_geo_path, "w") as simple_geo_file:
        json.dump(json.loads(geo_df.to_json()), simple_geo_file)
    os.chmod(simple_geo_path, 0o664)
    os.rename(simple_geo_path, simple_geo_path[:-4])

    return 0
Example #10
def _generic_exporter(
    lca,
    geocollection,
    filepath,
    spatial_dict,
    spatial_func,
    score_column_absolute="score_abs",
    score_column_relative="score_rel",
    cutoff=1e-3,
):
    assert isinstance(lca, RegionalizationBase)
    assert geocollection in geocollections
    assert hasattr(lca, spatial_dict)
    assert os.path.isfile(geocollections[geocollection].get("filepath"))
    assert not os.path.isfile(filepath)

    vector = np.ravel(getattr(lca, spatial_func)().sum(axis=0))
    lca.fix_spatial_dictionaries()
    total = lca.score
    cut = abs(lca.score * cutoff)
    field = geocollections[geocollection].get("field")
    assert field

    # TODO: Might need to make this nicer/more robust
    if not filepath.endswith(".geojson"):
        filepath += ".geojson"

    if geocollection == "world":
        # Special case; "world" is just a string, not a tuple
        results = {
            spatial_key: vector[index]
            for spatial_key, index in getattr(lca, spatial_dict).items()
            if abs(vector[index]) >= cut
        }
    else:
        results = {
            spatial_key[1]: vector[index]
            for spatial_key, index in getattr(lca, spatial_dict).items()
            if abs(vector[index]) >= cut
        }

    with fiona.Env():
        with fiona.open(geocollections[geocollection]["filepath"]) as source:
            meta = source.meta
            meta["driver"] = "GeoJSON"
            meta["schema"]["properties"].update({
                score_column_absolute: "float",
                score_column_relative: "float"
            })

            with fiona.open(filepath, "w", **meta) as sink:
                for feature in source:
                    try:
                        score = results[feature["properties"][field]]
                        feature["properties"][score_column_absolute] = score
                        feature["properties"][score_column_relative] = abs(
                            score / total)
                        sink.write(feature)
                    except KeyError:
                        continue
Example #11
def test_update_tag_item(layer, namespace, tmpdir):
    test_geopackage = str(tmpdir.join("test.gpkg"))
    schema = {'properties': {'CDATA1': 'str:254'}, 'geometry': 'Polygon'}
    with fiona.Env(), fiona.open(test_geopackage,
                                 "w",
                                 driver="GPKG",
                                 schema=schema,
                                 layer=layer) as gpkg:
        assert gpkg.get_tag_item("test_tag1", ns=namespace) is None
        gpkg.update_tag_item("test_tag1", "test_value1", ns=namespace)

    with fiona.Env(), fiona.open(test_geopackage, layer=layer) as gpkg:
        if namespace is not None:
            assert gpkg.get_tag_item("test_tag1") is None
        assert gpkg.get_tag_item("test_tag1", ns=namespace) == "test_value1"
        with pytest.raises(UnsupportedOperation):
            gpkg.update_tag_item("test_tag1", "test_value1", ns=namespace)
Example #12
def test_update_tags(layer, namespace, tags, tmpdir):
    test_geopackage = str(tmpdir.join("test.gpkg"))
    schema = {'properties': {'CDATA1': 'str:254'}, 'geometry': 'Polygon'}
    with fiona.Env(), fiona.open(test_geopackage,
                                 "w",
                                 driver="GPKG",
                                 schema=schema,
                                 layer=layer) as gpkg:
        assert gpkg.tags() == {}
        gpkg.update_tags(tags, ns=namespace)

    with fiona.Env(), fiona.open(test_geopackage, layer=layer) as gpkg:
        assert gpkg.tags(ns=namespace) == tags
        if namespace is not None:
            assert gpkg.tags() == {}
        with pytest.raises(UnsupportedOperation):
            gpkg.update_tags({}, ns=namespace)
Example #13
def vector_envelope(path):
    """Get geojson style envelope of vector file
    """
    with fiona.Env(OGR_GEOJSON_MAX_OBJ_SIZE="5000MB"):
        with fiona.open(path, 'r') as vector:
            # bounds = (xmin, ymin, xmax, ymax)
            b = vector.bounds
            env = [[b[0], b[3]], [b[0], b[1]], [b[2], b[1]], [b[2], b[3]]]
            return env
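
A short usage sketch; the path is a placeholder. With bounds = (xmin, ymin, xmax, ymax), the returned envelope lists the corners as upper-left, lower-left, lower-right, upper-right in (x, y) order:

corners = vector_envelope("regions.shp")
upper_left, lower_left, lower_right, upper_right = corners
print("upper-left corner:", upper_left)  # [xmin, ymax]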
Example #14
def open_shapefile(filename, dest_crs):
    with fiona.Env(OSR_WKT_FORMAT="WKT2_2018"), fiona.open(
            filename, encoding='Windows-1252') as shp:
        src_crs = pyproj.CRS.from_wkt(shp.crs_wkt)
        transformer = pyproj.Transformer.from_proj(src_crs,
                                                   dest_crs,
                                                   always_xy=True)
        reproject = lambda geom: transform(transformer.transform, geom)
        yield shp, reproject
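
Because open_shapefile() yields, it is presumably meant to be used as a context manager (the decorator is not shown above). A usage sketch with a hypothetical file name and target CRS:

import pyproj
from contextlib import contextmanager
from shapely.geometry import shape

dest_crs = pyproj.CRS.from_epsg(3577)  # hypothetical target CRS
with contextmanager(open_shapefile)("ranges/123.shp", dest_crs) as (shp, reproject):
    first = next(iter(shp))
    print(reproject(shape(first["geometry"])).bounds)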
Example #15
def _union(args):
    label, fp, face_ids = args
    shapes = []
    with fiona.Env():
        with fiona.open(fp) as src:
            for feat in src:
                if int(feat["properties"]["id"]) in face_ids:
                    shapes.append(_to_shapely(feat))
    return label, cascaded_union(shapes)
Example #16
def process_database(species=None, commit=False):
    """
    Calculates spatial representativeness using alpha hulls

    Generates alpha hulls from each source x taxon combination

    Intersects alpha hulls with range layers, and then calculates percentage of range covered
    """
    session = get_session()

    if commit:
        if species is None:
            session.execute("DELETE FROM taxon_source_alpha_hull")
        else:
            session.execute(
                """DELETE FROM taxon_source_alpha_hull
                WHERE taxon_id IN (SELECT id FROM taxon WHERE spno IN (%s))"""
                % sql_list_placeholder('species', species),
                sql_list_argument('species', species))
        session.commit()

    # Load coastal shapefile
    coastal_shape_filename = tsx.config.config.get("processing.alpha_hull",
                                                   "coastal_shp")
    with fiona.Env(OSR_WKT_FORMAT="WKT2_2018"), fiona.open(
            coastal_shape_filename, 'r') as coastal_shape:
        # Convert from fiona dictionary to shapely geometry and reproject
        shp_to_working_transformer = pyproj.Transformer.from_proj(
            pyproj.CRS.from_wkt(coastal_shape.crs_wkt),
            working_proj,
            always_xy=True)
        coastal_shape = reproject(shape(coastal_shape[0]['geometry']),
                                  shp_to_working_transformer)
        # Simplify coastal boundary - makes things run ~20X faster
        log.info("Simplifying coastal boundary")
        coastal_shape = coastal_shape.buffer(10000).simplify(10000)

    log.info("Generating alpha shapes")

    for data_type in 1, 2:
        log.info("Processing type %s data" % data_type)

        taxa = get_taxa(session, data_type, species)

        tasks = [(taxon_id, coastal_shape, data_type, commit)
                 for taxon_id in taxa]

        # This is important because we are about to spawn child processes, and this stops them attempting to share the
        # same database connection pool
        session.close()  # TODO: not sure if this is needed now

        # Process all the species in parallel
        for result, error in tqdm(run_parallel(process, tasks),
                                  total=len(tasks)):
            if error:
                print(error)
Example #17
    def check_data(self):
        """Check that definitions file is present, and that faces file is readable."""
        assert os.path.exists(self.data_fp)
        if gis:
            with fiona.Env():
                with fiona.open(self.faces_fp) as src:
                    assert src.meta

        gpkg_hash = json.load(open(self.data_fp))["metadata"]["sha256"]
        assert gpkg_hash == sha256(self.faces_fp)
Example #18
def get_shape_from_s3(file_location):
    """
    Loads esri shape file from s3.
    Args:
        file_location : example: s3://bucket/file.shp
    Returns:
        fiona_collection : fiona collection object containing features
    """
    fiona_collection = []
    boto3_session = get_boto3_session()
    with fiona.Env(session=fiona.session.AWSSession(boto3_session)):
        print(f"Loading file {file_location}")
        fiona_collection = fiona.open(file_location, driver="ESRI Shapefile")
    return fiona_collection
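
A minimal usage sketch, assuming the bucket and key exist and that get_boto3_session() supplies credentials able to read them (all names are placeholders):

collection = get_shape_from_s3("s3://my-bucket/parcels/parcels.shp")
for feature in collection:
    print(feature["properties"])
    break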
Example #19
    def dataset_crs(self):
        """ Provides a CRS of dataset, it loads it lazily (i.e. the first time it is needed)

        :return: Dataset's CRS
        :rtype: CRS
        """
        if self._dataset_crs is None:
            if self.path.startswith('s3://'):
                with fiona.Env(session=self.aws_session):
                    self._read_crs()
            else:
                self._read_crs()

        return self._dataset_crs
Example #20
def read_shapefile(path, to_wgs84=True):
    """Reads a shapefile into lists of shapes and properties for each feature
    within the shapefile layer.

    Parameters
    ----------
    path : str
        Path to shapefile. Assumes the shapefile contains one layer with
        all features of interest. Assumes each feature contains 'geometry'
        and 'properties' attributes.

    to_wgs84 : bool
        If True, applies coordinate transformation to WGS84.

    Returns
    -------
    shapes : list
        List of features as shapely shapes.

    properties : list
        List of feature properties (i.e. attributes).

    Notes
    -----
    """
    # newer fiona versions on Python 3 need an explicit GDAL environment (fiona.Env)
    # read the shapefile layer
    with fiona.Env():
        with fiona.open(path, 'r') as fiona_collection:
            # define projection transformation function
            if to_wgs84:
                proj_in = pyproj.Proj(fiona_collection.crs)
                proj_out = pyproj.Proj(init='EPSG:4326')  # WGS84
                proj = partial(pyproj.transform, proj_in, proj_out)

            # save layer as list
            layer = list(fiona_collection)

    # get WGS84 shapes and properties
    shapes = []
    properties = []
    for feature in layer:
        shape = geo.asShape(feature['geometry'])
        if to_wgs84:
            shapes.append(ops.transform(proj, shape))
        else:
            shapes.append(shape)
        properties.append(feature['properties'])

    return shapes, properties
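
A minimal usage sketch; the path and the 'name' attribute are placeholders:

shapes, properties = read_shapefile("data/catchments.shp", to_wgs84=True)
for shp, props in zip(shapes, properties):
    print(props.get("name"), shp.bounds)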
Example #21
def test_gdal_version_error(tmpdir):
    test_geopackage = str(tmpdir.join("test.gpkg"))
    schema = {'properties': {'CDATA1': 'str:254'}, 'geometry': 'Polygon'}
    with fiona.Env(), fiona.open(test_geopackage,
                                 "w",
                                 driver="GPKG",
                                 schema=schema,
                                 layer="layer") as gpkg:
        with pytest.raises(GDALVersionError):
            gpkg.update_tags({"test_tag1": "test_value1"}, ns="test")
        with pytest.raises(GDALVersionError):
            gpkg.update_tag_item("test_tag1", "test_value1", ns="test")
        with pytest.raises(GDALVersionError):
            gpkg.tags()
        with pytest.raises(GDALVersionError):
            gpkg.get_tag_item("test_tag1")
Example #22
def save_to_path(path, **features):
    """
    Save the provided features in the directory specified.
    File names are taken from the keywords.
    """

    if isinstance(path, str):
        path = Path(path)

    path.mkdir(parents=True, exist_ok=True)

    for name, feature in features.items():
        name = name + ".geojson"
        feature_path = path / name
        with fiona.Env():
            feature.to_file(feature_path, driver="GeoJSON")
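
A minimal usage sketch, assuming the keyword values are GeoDataFrames (anything exposing a to_file() method); the output file names come from the keyword names:

import geopandas as gpd
from shapely.geometry import Point

buildings = gpd.GeoDataFrame({"geometry": [Point(0.0, 0.0)]}, crs="EPSG:4326")
save_to_path("out/exports", buildings=buildings)  # writes out/exports/buildings.geojson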
Example #23
    def _load_vector_data(self, bbox):
        """ Loads vector data either from S3 or local path
        """
        bbox_bounds = bbox.transform_bounds(
            self.dataset_crs).geometry.bounds if bbox else None

        if self.path.startswith('s3://'):
            with fiona.Env(session=self.aws_session):
                with fiona.open(self.path, **self.fiona_kwargs) as features:
                    feature_iter = features if bbox_bounds is None else features.filter(
                        bbox=bbox_bounds)

                    return gpd.GeoDataFrame.from_features(
                        feature_iter,
                        columns=list(features.schema['properties']) +
                        ['geometry'],
                        crs=self.dataset_crs.pyproj_crs())

        return gpd.read_file(self.path, bbox=bbox_bounds, **self.fiona_kwargs)
Example #24
    def open(cls, filename, crs=None):
        """Creates a FileCollection from a file in disk.

        Parameters
        ----------
        filename : str
            Path of the file to read.
        crs : CRS
            Overrides the CRS of the collection; this function does not reproject.

        """
        with fiona.Env():
            with fiona.open(filename, 'r') as source:
                original_crs = CRS(source.crs)
                schema = source.schema
                length = len(source)
        crs = crs or original_crs
        ret_val = cls(filename, crs, schema, length)
        return ret_val
Example #25
    async def _save_isochrones(self):
        if self._isochrones is not None:
            # Defines a polygon feature geometry with one attribute
            schema = {'geometry': 'Polygon', 'properties': {'id': 'int'}}
            poly_id = 0
            with fiona.Env():
                with fiona.open('data/result.shp', 'w', 'ESRI Shapefile',
                                schema) as shp_file:
                    for bounding_poly in self._isochrones:
                        poly_id += 1
                        shp_file.write({
                            'geometry': mapping(bounding_poly),
                            'properties': {
                                'id': poly_id
                            }
                        })

            # Causes the `isochrones_saved` event
            await self.isochrones_saved()
Example #26
def vectorize(raster_file, metadata, vector_file, driver, mask_value=None):
    """Extract vector from raster. Vector propably will include polygons with holes.
    
    Args:
    raster_file (ndarray): raster image.
    src (DatasetReader type): Keeps path to filesystem.
    vector_file (string): Pathname of output vector file.
    driver (string): Kind of vector file format.
    mask_value (float or integer): No data value.
    
    Returns:
    Returns None & saves folder containing vector shapefile to cwd or to given path.
    """
    import fiona
    from rasterio.features import shapes
    import datetime as dt

    start = dt.datetime.now()

    if mask_value is not None:
        mask = raster_file == mask_value
    else:
        mask = None
    
    print("Extract id, shapes & values...")
    features = ({'properties': {'raster_val': v}, 'geometry': s} for i, (s, v) in enumerate(
            # The shapes iterator yields geometry, value pairs.
            shapes(raster_file, mask=mask, connectivity=4, transform=metadata['transform'])))

    print("Save to disk...")
    with fiona.Env():
        with fiona.open(
                vector_file, 'w', 
                driver = driver,
                crs = metadata['crs'],
                schema = {'properties': [('raster_val', 'int')], 'geometry': 'Polygon'}) as dst:
            dst.writerecords(features)

    end = dt.datetime.now()
    print("Elapsed time to vectorize raster to shp {}:\n{} mins".format(
        vector_file, (int((end-start).seconds/60)))
    return None
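
A minimal usage sketch, assuming an integer-typed, single-band classified raster read with rasterio (file names are placeholders):

import rasterio

with rasterio.open("classified.tif") as src:
    band = src.read(1)
    meta = src.meta  # carries the 'transform' and 'crs' entries used by vectorize()

vectorize(band, meta, "classified_polygons.shp", driver="ESRI Shapefile", mask_value=None)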
Example #27
def main_group(
        ctx, verbose, quiet, aws_profile, aws_no_sign_requests,
        aws_requester_pays):
    """Fiona command line interface.
    """
    verbosity = verbose - quiet
    configure_logging(verbosity)
    ctx.obj = {}
    ctx.obj["verbosity"] = verbosity
    ctx.obj["aws_profile"] = aws_profile
    envopts = {"CPL_DEBUG": (verbosity > 2)}
    if aws_profile or aws_no_sign_requests:
        session = AWSSession(
            profile_name=aws_profile,
            aws_unsigned=aws_no_sign_requests,
            requester_pays=aws_requester_pays,
        )
    else:
        session = DummySession()
    ctx.obj["env"] = fiona.Env(session=session, **envopts)
Example #28
    def _read_spatial_file(filepath) -> List[Dict]:
        try:
            with fiona.Env():
                return _read_spatial_data(filepath)
        except AttributeError:
            # older fiona versions
            with fiona.drivers():
                return _read_spatial_data(filepath)
        except NameError as ex:
            msg = "Could not read spatial dimension definition '%s' " % (filepath)
            msg += "Please install fiona to read geographic data files. Try running: \n"
            msg += "    pip install smif[spatial]\n"
            msg += "or:\n"
            msg += "    conda install fiona shapely rtree\n"
            raise SmifDataReadError(msg) from ex
        except IOError as ex:
            msg = "Could not read spatial dimension definition '%s' " % (filepath)
            msg += "Please verify that the path is correct and "
            msg += "that the file is present on this location."
            raise SmifDataNotFoundError(msg) from ex
Example #29
def tempfile_from_geojson(geojson):
    """
    Saves any geo-like Python object which implements ``__geo_interface__``
    (e.g. a geopandas.GeoDataFrame or shapely.geometry) to a temporary OGR_GMT
    text file.

    Parameters
    ----------
    geojson : geopandas.GeoDataFrame
        A geopandas GeoDataFrame, or any geo-like Python object which
        implements __geo_interface__, i.e. a GeoJSON.

    Yields
    ------
    tmpfilename : str
        A temporary OGR_GMT format file holding the geographical data.
        E.g. '1a2b3c4d5e6.gmt'.
    """
    with GMTTempFile(suffix=".gmt") as tmpfile:
        os.remove(tmpfile.name)  # ensure file is deleted first
        ogrgmt_kwargs = dict(filename=tmpfile.name, driver="OGR_GMT", mode="w")
        try:
            # Using geopandas.to_file to directly export to OGR_GMT format
            geojson.to_file(**ogrgmt_kwargs)
        except AttributeError:
            # pylint: disable=import-outside-toplevel
            # Other 'geo' formats which implement __geo_interface__
            import json

            import fiona
            import geopandas as gpd

            with fiona.Env():
                jsontext = json.dumps(geojson.__geo_interface__)
                # Do Input/Output via Fiona virtual memory
                with fiona.io.MemoryFile(
                        file_or_bytes=jsontext.encode()) as memfile:
                    geoseries = gpd.GeoSeries.from_file(filename=memfile)
                    geoseries.to_file(**ogrgmt_kwargs)

        yield tmpfile.name
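
Since tempfile_from_geojson() yields the file name, it is presumably wrapped as a context manager (the decorator is not shown above). A usage sketch with a small GeoDataFrame, assuming the surrounding module's helpers (GMTTempFile) are importable:

from contextlib import contextmanager

import geopandas as gpd
from shapely.geometry import Point

gdf = gpd.GeoDataFrame({"geometry": [Point(1.0, 2.0)]}, crs="EPSG:4326")
with contextmanager(tempfile_from_geojson)(gdf) as fname:
    print("temporary OGR_GMT file:", fname)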
Example #30
def merge(shp):                         # this function will merge all the county polygons in Dublin.
    if __name__ == "__main__":
        shp = "ctygeom.shp"
        features = []
        with fiona.Env():
            with fiona.open(shp, "r") as fh:
                for feature in fh:
                    if "ingal" in feature["properties"]["countyname"]: #first for loop finds all dublin counties under 'countyname'
                        features.append(feature)
                    elif "ublin_city" in feature["properties"]["countyname"]:
                        features.append(feature)
                    elif "outh_dublin" in feature["properties"]["countyname"]:
                        features.append(feature)
                    elif "dunlaoghaire–rathdown" in feature["properties"]["countyname"]:
                        features.append(feature)
                    else:
                        print("does not compute")
        # all matching counties are collected in the features list; the list is
        # then merged into a single geometry with cascaded_union
        result = geom_info(features[0], features[1])
        print("g1 Info\n" + "-" * 20)
        for k in result[0].items():
            print(k)
        from shapely.ops import cascaded_union
        from shapely.geometry import shape
        merged = cascaded_union([shape(f["geometry"]) for f in features])
    return merged