def write_shapefile(self, cells, out_shp, data=False):

        driver = ogr.GetDriverByName('Esri Shapefile')
        ds = driver.CreateDataSource(out_shp)
        layer = ds.CreateLayer('', None, ogr.wkbPolygon)

        layer.CreateField(ogr.FieldDefn('id', ogr.OFTString))
        if data:
            # JSON is a subtype of OFTString, not a standalone field type
            data_field = ogr.FieldDefn('data', ogr.OFTString)
            data_field.SetSubType(ogr.OFSTJSON)
            layer.CreateField(data_field)
        defn = layer.GetLayerDefn()

        for cell in cells:
            feat = ogr.Feature(defn)
            feat.SetField('id', cell['id'])

            geom = ogr.CreateGeometryFromWkt(cell['cell_poly'])
            feat.SetGeometry(geom)

            if data:
                if cell['id'] in cell['data']:
                    feat.SetField('data', json.dumps(cell['data'][cell['id']]))
                else:
                    feat.SetField('data', json.dumps(cell['data']))
            layer.CreateFeature(feat)

        feat = geom = None  # dereference feature and geometry
        layer = None
        ds = None  # dereferencing the data source flushes the shapefile to disk
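A minimal sketch of the input this method appears to expect: a list of dicts, each carrying an 'id', a WKT polygon under 'cell_poly', and optionally a 'data' dict keyed by cell id. The method also relies on module-level imports of ogr (from osgeo) and json; the GridWriter class name below is an assumption.

from osgeo import ogr  # required by write_shapefile
import json            # required by write_shapefile

cells = [
    {
        'id': 'A1',
        'cell_poly': 'POLYGON ((0 0, 1 0, 1 1, 0 1, 0 0))',
        'data': {'A1': {'count': 3}},
    },
]
# writer = GridWriter()                                  # hypothetical class defining write_shapefile
# writer.write_shapefile(cells, 'cells.shp', data=True)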
Example #2
def save_polygons(poly, output_folder, fname, meta=None):

    driver = ogr.GetDriverByName('Esri Shapefile')
    ds = driver.CreateDataSource(output_folder + '{}.shp'.format(fname))
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(4326)

    layer = ds.CreateLayer('', srs, ogr.wkbMultiPolygon)

    # Add one attribute
    layer.CreateField(ogr.FieldDefn('id', ogr.OFTInteger))
    defn = layer.GetLayerDefn()

    ## If there are multiple geometries, put the "for" loop here

    # Create a new feature (attribute and geometry)
    feat = ogr.Feature(defn)
    feat.SetField('id', 123)

    # Make a geometry, from Shapely object
    geom = ogr.CreateGeometryFromWkb(poly.wkb)
    feat.SetGeometry(geom)

    layer.CreateFeature(feat)

    feat = geom = None  # dereference feature and geometry

    # Save and close everything
    ds = layer = None
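A short usage sketch for save_polygons, assuming shapely is available; the square and the output folder are placeholders, and the folder (including its trailing slash) must already exist.

from shapely.geometry import Polygon

square = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
save_polygons(square, '/tmp/polys/', 'square')   # writes /tmp/polys/square.shp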
def create_index_fld(input_shp,
                     class_names,
                     output_name='training_indexed'):
    """
    Creates an extra field in the shapefile with a unique index value for each polygon.
    input_shp - input shapefile
    class_names - list of unique class names to classify (matched against the 'klasa' field)
    output_name - output shapefile name (without the .shp extension)
    """
    data_shp = input_shp
    driver_shp = ogr.GetDriverByName('ESRI Shapefile')
    vector = driver_shp.Open(data_shp, 1)
    lyr = vector.GetLayer()
    directory_out = os.getcwd()
    # if a file with the given name already exists, delete it
    if output_name + '.shp' in os.listdir(directory_out):
        driver_shp.DeleteDataSource(output_name + '.shp')
    out_ds = driver_shp.CreateDataSource(directory_out)
    print('created file', output_name + '.shp')

    lyr_copy = out_ds.CopyLayer(lyr, output_name)
    fieldDefn = ogr.FieldDefn('indeks', ogr.OFTInteger)
    fieldDefn.SetWidth(10)  # width 1 would only fit single-digit indices
    lyr_copy.CreateField(fieldDefn)

    for nb, f in enumerate(lyr_copy):
        f.SetField('indeks', nb)
        lyr_copy.SetFeature(f)

    fieldDefn = ogr.FieldDefn('kod', ogr.OFTInteger)
    fieldDefn.SetWidth(10)
    lyr_copy.CreateField(fieldDefn)

    # assign a numeric code to each class, matched via the 'klasa' attribute
    for code, name in enumerate(class_names, start=1):
        print(name)
        lyr_copy.SetAttributeFilter("klasa = '{0}'".format(name))
        for f in lyr_copy:
            f.SetField('kod', code)
            lyr_copy.SetFeature(f)

    out_ds = lyr_copy = None  # dereference to flush edits to disk
    print('created')
    return output_name + '.shp'
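A hedged usage sketch for create_index_fld; the file name and class list are placeholders, and the input shapefile is expected to carry its class label in a 'klasa' field.

classes = ['forest', 'water', 'urban']   # hypothetical class names
result = create_index_fld('training.shp', classes, output_name='training_indexed')
print(result)                            # training_indexed.shp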
Example #4
def vectorize_data():

    setup_env()

    temp_path = os.getenv("TEMP_PATH")

    filelist = create_filelist()

    print("Starting vectorization...")
    for file in tqdm(filelist, unit=" file"):
        # parse the timestamp out of the RADOLAN-style file name
        date_time_obj = datetime.strptime(
            os.path.basename(file), 'RW_%Y%m%d-%H%M.asc')

        filename_input = temp_path + "/cropped/{}".format(
            date_time_obj.strftime("%Y%m%d-%H%M"))
        filename_output = temp_path + "/vectorized/{}".format(
            date_time_obj.strftime("%Y%m%d-%H%M"))

        source = gdal.Open(filename_input + ".tif")
        band = source.GetRasterBand(1)
        _ = band.ReadAsArray()

        driver = ogr.GetDriverByName("ESRI Shapefile")

        if os.path.exists(filename_output + ".shp"):
            driver.DeleteDataSource(filename_output + ".shp")

        target = driver.CreateDataSource(filename_output + ".shp")

        srs = osr.SpatialReference()
        srs.ImportFromProj4(
            "+proj=stere +lon_0=10.0 +lat_0=90.0 +lat_ts=60.0 +a=6370040 +b=6370040 +units=m")

        targetLayer = target.CreateLayer("radolan", srs=srs)
        targetField = ogr.FieldDefn("rain", ogr.OFTInteger)
        targetLayer.CreateField(targetField)

        gdal.Polygonize(band, None, targetLayer, 0, [], callback=None)

        target = None  # dereference to flush the shapefile to disk
        source = band = None

    print("Vectorization complete.")
Example #5
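This snippet assumes csvfile and csvname were defined earlier, along with the usual imports; a hypothetical setup (the file names are placeholders, the column names match the loop below):

import csv
from osgeo import ogr, osr

csvfile = "aggregations.csv"   # hypothetical input CSV with 'geohash', 'c_x' and 'c_y' columns
csvname = "aggregations"       # hypothetical base name for the output shapefile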
# set up the shapefile driver
driver = ogr.GetDriverByName("ESRI Shapefile")

# create the data source
data_source = driver.CreateDataSource(csvname + ".shp")

# create the spatial reference, WGS84
srs = osr.SpatialReference()
srs.ImportFromEPSG(4326)

# create the layer
layer = data_source.CreateLayer(csvname + "_Aggregations", srs, ogr.wkbPoint)

# Add the fields we're interested in
field_name = ogr.FieldDefn("Name", ogr.OFTString)
field_name.SetWidth(24)
layer.CreateField(field_name)
layer.CreateField(ogr.FieldDefn("Latitude", ogr.OFTReal))
layer.CreateField(ogr.FieldDefn("Longitude", ogr.OFTReal))

with open(csvfile, 'r') as csvf:
    reader = csv.DictReader(csvf)
    # Process the text file and add the attributes and features to the shapefile
    for row in reader:
        # create the feature
        feature = ogr.Feature(layer.GetLayerDefn())
        # Set the attributes using the values from the delimited text file
        feature.SetField("Name", row['geohash'])
        feature.SetField("Latitude", row['c_y'])
        feature.SetField("Longitude", row['c_x'])
Example #6
# https://github.com/GeospatialPython/Learn/raw/master/islands.zip

from osgeo import gdal, ogr, osr

# Thresholded input raster name
src = "islands_classified.tiff"
# Output shapefile name
tgt = "extract.shp"
# OGR layer name
tgtLayer = "extract"
# Open the input raster
srcDS = gdal.Open(src)
# Grab the first band
band = srcDS.GetRasterBand(1)
# Force gdal to use the band as a mask
mask = band
# Set up the output shapefile
driver = ogr.GetDriverByName("ESRI Shapefile")
shp = driver.CreateDataSource(tgt)
# Copy the spatial reference
srs = osr.SpatialReference()
srs.ImportFromWkt(srcDS.GetProjectionRef())
layer = shp.CreateLayer(tgtLayer, srs=srs)
# Set up the dbf file
fd = ogr.FieldDefn("DN", ogr.OFTInteger)
layer.CreateField(fd)
dst_field = 0
# Automatically extract features from an image!
extract = gdal.Polygonize(band, mask, layer, dst_field, [], None)
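gdal.Polygonize returns 0 (CE_None) on success, and the shapefile is only guaranteed to be flushed once the data source is released; a short follow-up using the names above:

assert extract == 0   # 0 == gdal.CE_None, i.e. success
shp = None            # dereference the data source so OGR writes extract.shp to disk
srcDS = None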
Example #7
    def _tiger_to_tract(self, infile):
        """ Converts collection of Census Tiger files into a geopandas.GeoDataFrame of census tracts
            Modified from original at
            https://svn.osgeo.org/gdal/tags/1.4.3/gdal/pymod/samples/tigerpoly.py
        """

        class Module(object):
            def __init__(self):
                self.lines = {}
                self.poly_line_links = {}

        outfile = 'tracts.shp'

        # Open the datasource to operate on.
        ds = ogr.Open(infile, update=0)
        poly_layer = ds.GetLayerByName('Polygon')

        # Create output file for the composed polygons.
        nad83 = osr.SpatialReference()
        nad83.SetFromUserInput('NAD83')

        shp_driver = ogr.GetDriverByName('ESRI Shapefile')
        # only delete a previous output if it actually exists
        if os.path.exists(outfile):
            shp_driver.DeleteDataSource(outfile)

        shp_ds = shp_driver.CreateDataSource(outfile)
        shp_layer = shp_ds.CreateLayer(
            'out', geom_type=ogr.wkbPolygon, srs=nad83)

        src_defn = poly_layer.GetLayerDefn()
        poly_field_count = src_defn.GetFieldCount()

        for fld_index in range(poly_field_count):
            src_fd = src_defn.GetFieldDefn(fld_index)

            fd = ogr.FieldDefn(src_fd.GetName(), src_fd.GetType())
            fd.SetWidth(src_fd.GetWidth())
            fd.SetPrecision(src_fd.GetPrecision())
            shp_layer.CreateField(fd)

        # Read all features in the line layer, holding just the geometry in a hash
        # for fast lookup by TLID.

        line_layer = ds.GetLayerByName('CompleteChain')
        line_count = 0

        modules_hash = {}

        feat = line_layer.GetNextFeature()
        geom_id_field = feat.GetFieldIndex('TLID')
        tile_ref_field = feat.GetFieldIndex('MODULE')
        while feat is not None:
            geom_id = feat.GetField(geom_id_field)
            tile_ref = feat.GetField(tile_ref_field)

            try:
                module = modules_hash[tile_ref]
            except KeyError:
                module = Module()
                modules_hash[tile_ref] = module

            module.lines[geom_id] = feat.GetGeometryRef().Clone()
            line_count = line_count + 1

            feat.Destroy()

            feat = line_layer.GetNextFeature()

        # Read all polygon/chain links and build a hash keyed by POLY_ID listing
        # the chains (by TLID) attached to it.

        link_layer = ds.GetLayerByName('PolyChainLink')

        feat = link_layer.GetNextFeature()
        geom_id_field = feat.GetFieldIndex('TLID')
        tile_ref_field = feat.GetFieldIndex('MODULE')
        lpoly_field = feat.GetFieldIndex('POLYIDL')
        rpoly_field = feat.GetFieldIndex('POLYIDR')

        link_count = 0

        while feat is not None:
            module = modules_hash[feat.GetField(tile_ref_field)]

            tlid = feat.GetField(geom_id_field)

            lpoly_id = feat.GetField(lpoly_field)
            rpoly_id = feat.GetField(rpoly_field)

            if lpoly_id == rpoly_id:
                feat.Destroy()
                feat = link_layer.GetNextFeature()
                continue

            try:
                module.poly_line_links[lpoly_id].append(tlid)
            except KeyError:
                module.poly_line_links[lpoly_id] = [tlid]

            try:
                module.poly_line_links[rpoly_id].append(tlid)
            except KeyError:
                module.poly_line_links[rpoly_id] = [tlid]

            link_count = link_count + 1

            feat.Destroy()

            feat = link_layer.GetNextFeature()

        # Process all polygon features.

        feat = poly_layer.GetNextFeature()
        tile_ref_field = feat.GetFieldIndex('MODULE')
        polyid_field = feat.GetFieldIndex('POLYID')

        degenerate_count = 0

        while feat is not None:
            module = modules_hash[feat.GetField(tile_ref_field)]
            polyid = feat.GetField(polyid_field)

            tlid_list = module.poly_line_links[polyid]

            link_coll = ogr.Geometry(type=ogr.wkbGeometryCollection)
            for tlid in tlid_list:
                geom = module.lines[tlid]
                link_coll.AddGeometry(geom)

            try:
                poly = ogr.BuildPolygonFromEdges(link_coll)

                if poly.GetGeometryRef(0).GetPointCount() < 4:
                    degenerate_count = degenerate_count + 1
                    poly.Destroy()
                    feat.Destroy()
                    feat = poly_layer.GetNextFeature()
                    continue

                feat2 = ogr.Feature(feature_def=shp_layer.GetLayerDefn())

                for fld_index in range(poly_field_count):
                    feat2.SetField(fld_index, feat.GetField(fld_index))

                feat2.SetGeometryDirectly(poly)

                shp_layer.CreateFeature(feat2)
                feat2.Destroy()

            except Exception:
                warn('BuildPolygonFromEdges failed.')

            feat.Destroy()

            feat = poly_layer.GetNextFeature()

        if degenerate_count:
            warn('Discarded %d degenerate polygons.' % degenerate_count)

        # Cleanup

        shp_ds = None  # dereference to flush the output shapefile to disk
        ds = None

        # build a fully-qualified fips code and dissolve on it to create tract geographies
        gdf = gpd.read_file(outfile)

        if "CTBNA90" in gdf.columns:

            gdf = gdf.rename(columns={"CTBNA90": 'TRACT', "BLK90": "BLOCK"})

        gdf['STATE'] = gdf['STATE'].astype(str).str.rjust(2, "0")
        gdf['COUNTY'] = gdf['COUNTY'].astype(str).str.rjust(3, "0")
        gdf['TRACT'] = gdf['TRACT'].astype(str).str.rjust(6, "0")
        gdf['BLOCK'] = gdf['BLOCK'].astype(str).str.rjust(4, "0")
        gdf['fips'] = gdf.STATE + gdf.COUNTY + gdf.TRACT
        if self.geom == 'block':
            gdf['fips'] += gdf.BLOCK

        gdf = gdf.dropna(subset=['fips'])
        gdf.geometry = gdf.buffer(0)
        gdf = gdf.dissolve(by='fips')
        gdf.reset_index(inplace=True)

        shp_driver.DeleteDataSource(outfile)

        return gdf
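A hedged call sketch for _tiger_to_tract; the surrounding class is not shown, so the class name, its geom attribute, and the TIGER module path below are all assumptions.

# reader = TigerReader()                           # hypothetical class defining _tiger_to_tract
# reader.geom = 'tract'                            # the method appends the block code only when geom == 'block'
# tracts = reader._tiger_to_tract('TGR48453')      # hypothetical path to an unzipped TIGER/Line module
# print(tracts[['fips', 'geometry']].head())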
Example #8
            band_dict[i + 1] = file_only
        i += 1

        class_file = os.path.join(training_folder, sub, r'classification.tif')
        f.write(class_file)
        file_only = os.path.basename(os.path.normpath(class_file))
        band_dict[i + 1] = file_only

    # take the vector shapefile and convert the alphabetic codes to numeric codes
    print('Mapping codes')
    vector_classes = os.path.join(train_folder, r'classification.shp')
    new_field_name = 'Classvalue'
    driver = ogr.GetDriverByName("ESRI Shapefile")
    dataSource = driver.Open(vector_classes, 1)
    layer = dataSource.GetLayer()
    new_field = ogr.FieldDefn(new_field_name, ogr.OFTInteger)

    field_names = [field.name for field in layer.schema]

    if new_field_name in field_names:
        layer.DeleteField(field_names.index(new_field_name))
    layer.CreateField(new_field)

    for feature in layer:
        alpha_code = feature.GetField('Classname')
        num_code = code_dict[alpha_code]
        feature.SetField(new_field_name, num_code)
        layer.SetFeature(feature)

    dataSource = None
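The fragment above assumes a code_dict mapping alphabetic class names (the 'Classname' field) to numeric codes was built earlier; a hypothetical example of its shape:

code_dict = {'forest': 1, 'water': 2, 'urban': 3}   # hypothetical Classname -> Classvalue mapping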