def provision(bbox: BBOX, run_id: str) -> List[str]:
    """Build download/generation requests for BC 1:20,000 grid cells intersecting *bbox*.

    NOTE(review): this block appears truncated — `bbox_cells` is populated but
    never returned, despite the declared List[str] return type. Confirm the
    remainder of the function (download + return) exists elsewhere.
    """
    # Per-run working directory; os.makedirs raises if it already exists.
    run_directory = get_run_data_path(run_id, (CACHE_DIR_NAME,))
    os.makedirs(run_directory)
    driver = ogr.GetDriverByName("GPKG")
    grid_datasource = driver.Open(get_data_path(("grids.gpkg",)))
    grid_layer = grid_datasource.GetLayerByName("BC-20000")
    # Restrict iteration to grid cells overlapping the requested bounding box.
    grid_layer.SetSpatialFilterRect(bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y)
    bbox_cells = list()
    while grid_cell := grid_layer.GetNextFeature():
        cell_name = grid_cell.GetFieldAsString("MAP_TILE")
        # Parent tile is the leading 2-3 digits plus one letter of the tile name.
        cell_parent = re.search(r"^\d{2,3}[a-z]", cell_name, re.IGNORECASE)[0]
        bbox_cells.append(
            GenerationRequest(
                url=f"https://pub.data.gov.bc.ca/datasets/177864/tif/bcalb/{cell_parent}/{cell_name}.zip",
                path=get_cache_path((CACHE_DIR_NAME, f"{cell_name}.zip")),
                expected_type="application/zip",
                cell_name=cell_name,
                tif_name=f"{cell_name}.tif",
                tif_path=get_cache_path((CACHE_DIR_NAME, f"{cell_name}.tif")),
                prj_path=get_cache_path((CACHE_DIR_NAME, f"{cell_name}_prj.tif")),
                run_path=get_run_data_path(
                    run_id, (CACHE_DIR_NAME, f"{cell_name}.tif")
                ),
            )
        )
def write_shapefile(self, cells, out_shp, data=False):
    """Write *cells* to an ESRI Shapefile at *out_shp*.

    :param cells: iterable of dicts with 'id', 'cell_poly' (WKT polygon) and,
        when *data* is truthy, a 'data' mapping
    :param out_shp: destination .shp path
    :param data: if truthy, also write a JSON-encoded 'data' attribute
    """
    driver = ogr.GetDriverByName('Esri Shapefile')
    ds = driver.CreateDataSource(out_shp)
    layer = ds.CreateLayer('', None, ogr.wkbPolygon)
    layer.CreateField(ogr.FieldDefn('id', ogr.OFTString))
    if data:
        # BUG FIX: ogr.OFSTJSON is a field *subtype* constant, not a field
        # type; passing it as the FieldDefn type created a mistyped field.
        # JSON content belongs in an OFTString field flagged with the
        # OFSTJSON subtype.
        data_field = ogr.FieldDefn('data', ogr.OFTString)
        data_field.SetSubType(ogr.OFSTJSON)
        layer.CreateField(data_field)
    defn = layer.GetLayerDefn()
    for cell in cells:
        feat = ogr.Feature(defn)
        feat.SetField('id', cell['id'])
        geom = ogr.CreateGeometryFromWkt(cell['cell_poly'])
        feat.SetGeometry(geom)
        if data:
            # Prefer the per-cell entry when present; otherwise store the
            # whole mapping.
            if cell['id'] in cell['data']:
                feat.SetField('data', json.dumps(cell['data'][cell['id']]))
            else:
                feat.SetField('data', json.dumps(cell['data']))
        layer.CreateFeature(feat)
        feat = geom = None  # release OGR objects before the next iteration
    ds = layer = feat = geom = None  # drop handles so OGR flushes the file
def addBuffer(file, size, new_file):
    """Copy every feature of *file* into *new_file* with its geometry buffered by *size*."""
    src_ds = ogr.Open(file)
    src_layer = src_ds.GetLayer()
    shp_driver = ogr.GetDriverByName('ESRI Shapefile')
    dst_ds = shp_driver.CreateDataSource(new_file)
    dst_layer = dst_ds.CreateLayer('temp', src_layer.GetSpatialRef(), ogr.wkbPolygon)
    # Mirror the source attribute schema onto the output layer.
    dst_layer = createFieldsFrom(src_layer, dst_layer)
    for src_feature in src_layer:
        src_geom = src_feature.GetGeometryRef()
        if src_geom is None:
            # Features without geometry are silently skipped.
            continue
        buffered = ogr.Feature(dst_layer.GetLayerDefn())
        buffered.SetFrom(src_feature)
        buffered.SetGeometry(src_geom.Buffer(size))
        dst_layer.CreateFeature(buffered)
        buffered = None
        src_geom = None
    # Drop all handles so OGR flushes the output datasource.
    dst_layer = None
    dst_ds = None
    shp_driver = None
    src_layer = None
    src_ds = None
def save_polygons(poly, output_folder, fname, meta=None):
    """Save a Shapely (multi)polygon as an ESRI Shapefile in WGS84.

    :param poly: Shapely geometry exposing a ``wkb`` attribute
    :param output_folder: destination directory
    :param fname: output file name without extension
    :param meta: unused; kept for interface compatibility
    """
    driver = ogr.GetDriverByName('Esri Shapefile')
    # BUG FIX: use os.path.join instead of string '+' so output_folder no
    # longer has to end with a path separator (the old concatenation silently
    # produced paths like 'outdirname.shp').
    ds = driver.CreateDataSource(os.path.join(output_folder, '{}.shp'.format(fname)))
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(4326)  # WGS84
    layer = ds.CreateLayer('', srs, ogr.wkbMultiPolygon)
    # Add one attribute
    layer.CreateField(ogr.FieldDefn('id', ogr.OFTInteger))
    defn = layer.GetLayerDefn()
    ## If there are multiple geometries, put the "for" loop here
    # Create a new feature (attribute and geometry)
    feat = ogr.Feature(defn)
    feat.SetField('id', 123)  # placeholder id value
    # Make a geometry, from Shapely object
    geom = ogr.CreateGeometryFromWkb(poly.wkb)
    feat.SetGeometry(geom)
    layer.CreateFeature(feat)
    feat = geom = None  # destroy these
    # Save and close everything
    ds = layer = feat = geom = None
def create_shp(shp_file_dir, overwrite=True, *args, **kwargs):
    """
    Create a new shapefile with a defined geometry type (optional)
    :param shp_file_dir: STR of the (relative) shapefile directory (ends on ".shp")
    :param overwrite: [optional] BOOL - if True, existing files are overwritten
    :kwarg layer_name: [optional] STR of the layer_name - if None: no layer will be created
    :kwarg layer_type: [optional] STR ("point", "line", or "polygon") of the layer_name - if None: no layer will be created
    :output: ogr shapefile
    """
    shp_driver = ogr.GetDriverByName("ESRI Shapefile")
    # check if output file exists if yes delete it
    if os.path.exists(shp_file_dir) and overwrite:
        shp_driver.DeleteDataSource(shp_file_dir)
    # create and return new shapefile object
    new_shp = shp_driver.CreateDataSource(shp_file_dir)
    # create layer if layer_name and layer_type are provided
    if kwargs.get("layer_name") and kwargs.get("layer_type"):
        # create dictionary of ogr.SHP-TYPES
        geometry_dict = {"point": ogr.wkbPoint,
                         "points": ogr.wkbMultiPoint,
                         "line": ogr.wkbMultiLineString,
                         "polygon": ogr.wkbMultiPolygon}
        # create layer
        try:
            new_shp.CreateLayer(str(kwargs.get("layer_name")),
                                geom_type=geometry_dict[str(kwargs.get("layer_type").lower())])
        except KeyError:
            print("Error: Invalid layer_type provided (must be 'point', 'line', or 'polygon').")
        # BUG FIX: a non-string layer_type without a .lower() method raises
        # AttributeError, not TypeError, so it previously escaped this handler.
        except (TypeError, AttributeError):
            print("Error: layer_name and layer_type must be string.")
    return new_shp
def extent_polygon(file, out):
    """Write the bounding-box polygon of *file*'s layer to the shapefile *out*."""
    src = ogr.Open(file)
    src_layer = src.GetLayer()
    # GetExtent returns (minX, maxX, minY, maxY).
    min_x, max_x, min_y, max_y = src_layer.GetExtent()
    # Build the extent rectangle as a closed linear ring.
    ring = ogr.Geometry(ogr.wkbLinearRing)
    corners = ((min_x, min_y), (max_x, min_y), (max_x, max_y),
               (min_x, max_y), (min_x, min_y))
    for x, y in corners:
        ring.AddPoint(x, y)
    poly = ogr.Geometry(ogr.wkbPolygon)
    poly.AddGeometry(ring)
    shp_driver = ogr.GetDriverByName('ESRI Shapefile')
    dst = shp_driver.CreateDataSource(out)
    extent_layer = dst.CreateLayer('temp', src_layer.GetSpatialRef(), ogr.wkbMultiPolygon)
    feature = ogr.Feature(extent_layer.GetLayerDefn())
    feature.SetGeometry(poly)
    extent_layer.CreateFeature(feature)
    # Drop all handles so OGR flushes the output.
    extent_layer = None
    dst = None
    shp_driver = None
    feature = None
    src_layer = None
    src = None
def provision(bbox: BBOX, run_id: str) -> List[str]:
    """Build download/generation requests for Canada 1:50,000 DEM cells intersecting *bbox*.

    NOTE(review): this block appears truncated — `bbox_cells` is populated but
    never returned, despite the declared List[str] return type.
    """
    run_directory = get_run_data_path(run_id, (CACHE_DIR_NAME, ))
    os.makedirs(run_directory)
    driver = ogr.GetDriverByName("GPKG")
    grid_datasource = driver.Open(get_data_path(("grids.gpkg", )))
    grid_layer = grid_datasource.GetLayerByName("Canada-50000")
    # Only iterate grid cells overlapping the requested bounding box.
    grid_layer.SetSpatialFilterRect(bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y)
    bbox_cells = list()
    while grid_cell := grid_layer.GetNextFeature():
        cell_name = grid_cell.GetFieldAsString("NTS_SNRC")
        # Parent is the leading digits+letter with any leading zero stripped.
        cell_parent = re.sub(
            "^0", "",
            re.search(r"^\d{2,3}[a-z]", cell_name, re.IGNORECASE)[0])
        # Each cell is published as separate east/west DEM halves.
        for cardinal in ("e", "w"):
            cell_part_name = f"{cell_name.lower()}_{cardinal}"
            zip_file_name = f"{cell_part_name}.dem.zip"
            bbox_cells.append(
                GenerationRequest(
                    url=
                    f"https://pub.data.gov.bc.ca/datasets/175624/{cell_parent.lower()}/{zip_file_name}",
                    path=get_cache_path((CACHE_DIR_NAME, zip_file_name)),
                    expected_type="application/zip",
                    dem_path=get_cache_path(
                        (CACHE_DIR_NAME, f"{cell_part_name}.dem")),
                    prj_path=get_cache_path(
                        (CACHE_DIR_NAME, f"{cell_part_name}_prj.tif")),
                    hs_path=get_cache_path(
                        (CACHE_DIR_NAME, f"{cell_part_name}_hs.tif")),
                    run_path=get_run_data_path(
                        run_id, (CACHE_DIR_NAME, f"{cell_part_name}.tif")),
                ))
def rasterize(in_raster, out_raster_name, shp_in):
    """Rasterize a shapefile onto a new 2-band UInt16 GTiff aligned with *in_raster*.

    (Docstring translated from Polish:)
    in_raster - raster whose CRS, dimensions and resolution the output copies
    out_raster_name - path of the output tiff file
    shp_in - path of the shapefile whose polygons are burned in
    Band 2 is burned from attribute 'kod', band 1 from attribute 'indeks'.
    """
    driver_raster = gdal.GetDriverByName("ENVI")
    driver_raster.Register()
    raster_in = gdal.Open(in_raster, gdalconst.GA_ReadOnly)
    driver_shp = ogr.GetDriverByName("ESRI Shapefile")
    # The parameter name is rebound to the opened datasource here.
    shp_in = driver_shp.Open(shp_in, 1)
    shp_lyr = shp_in.GetLayer()
    ncol = raster_in.RasterXSize
    nrow = raster_in.RasterYSize
    proj = raster_in.GetProjectionRef()
    ext = raster_in.GetGeoTransform()
    # Create the raster dataset
    memory_driver = gdal.GetDriverByName("GTiff")
    out_raster_ds = memory_driver.Create(out_raster_name, ncol, nrow, 2,
                                         gdal.GDT_UInt16)
    out_raster_ds.SetProjection(proj)
    out_raster_ds.SetGeoTransform(ext)
    out_raster = out_raster_ds.ReadAsArray()
    # NOTE(review): this fill only mutates the in-memory copy; it is never
    # written back to the dataset, so the -999 fill has no effect on the
    # output file. Confirm whether a WriteArray call is missing here.
    for i in range(2):
        out_raster[i].fill(-999)
    # Burn attribute 'kod' into band 2.
    status0 = gdal.RasterizeLayer(
        out_raster_ds,
        [2],
        shp_lyr,
        None,
        None,
        options=["ALL_TOUCHED=TRUE", "ATTRIBUTE={0}".format("kod")],
    )
    # Burn attribute 'indeks' into band 1.
    status = gdal.RasterizeLayer(
        out_raster_ds,
        [1],
        shp_lyr,
        None,
        None,
        options=["ALL_TOUCHED=TRUE", "ATTRIBUTE={0}".format("indeks")],
    )
    out_raster_ds = None
    # NOTE(review): status0 (the 'kod' burn) is never checked; only the
    # second rasterization's return code drives the message below.
    if status != 0:
        print("I don't think it worked...")
    else:
        print("Success")
def merge_files(f1, f2, out):
    """Dissolve both shapefiles into multipolygons, union them, and write the result to *out*."""
    print("Merging files...")
    try:
        src_a = ogr.Open(f1)
        layer_a = src_a.GetLayer()
        src_b = ogr.Open(f2)
        layer_b = src_b.GetLayer()
        shp_driver = ogr.GetDriverByName('ESRI Shapefile')
        dst = shp_driver.CreateDataSource(out)
        merge_layer = dst.CreateLayer('temp', layer_b.GetSpatialRef(), ogr.wkbMultiPolygon)
        print(" First file uninon..")
        merged_a = ogr.Geometry(ogr.wkbMultiPolygon)
        for feature in layer_a:
            geometry = feature.GetGeometryRef()
            if geometry is not None:
                merged_a = merged_a.Union(geometry)
            geometry = None
        print(" Second file uninon..")
        merged_b = ogr.Geometry(ogr.wkbMultiPolygon)
        for feature in layer_b:
            geometry = feature.GetGeometryRef()
            if geometry is not None:
                merged_b = merged_b.Union(geometry)
            geometry = None
        # Buffer(0) repairs self-intersections before the final union.
        merged_a = merged_a.Buffer(0)
        merged_b = merged_b.Buffer(0)
        print(" Final uninon..")
        combined = merged_a.Union(merged_b)
        out_feature = ogr.Feature(merge_layer.GetLayerDefn())
        out_feature.SetGeometry(combined)
        merge_layer.CreateFeature(out_feature)
    except:
        print("exception thrown!")
        traceback.print_exc()
    # Drop every handle so OGR flushes the output datasource.
    out_feature = None
    merged_a = None
    merged_b = None
    diff_lyr = None
    src_a = None
    src_b = None
    dst = None
    layer_a = None
    layer_b = None
def generate_geojson_features(self):
    """
    Generates and yields a series of storm forecasts, one for each
    feature in <self.filepath>. Observations are returned as
    Elasticsearch bulk API upsert actions, with documents in GeoJSON to
    match the Elasticsearch index mappings.

    :returns: Generator of Elasticsearch actions to upsert the storm
              forecasts
    """
    driver = ogr.GetDriverByName('ESRI Shapefile')
    filepath = str(self.filepath.resolve())
    data = driver.Open(filepath, 0)  # 0 = read-only
    lyr = data.GetLayer(0)
    file_datetime_str = strftime_rfc3339(self.date_)
    for feature in lyr:
        feature_json = feature.ExportToJson(as_object=True)
        # Mark every freshly ingested forecast as active and record provenance.
        feature_json['properties']['active'] = True
        feature_json['properties']['filename'] = self.filepath.stem
        feature_json['properties']['filedate'] = file_datetime_str  # noqa

        # TODO: Remove once upstream data is patched
        # clean rad consecutive coordinates in geometry (temporary fix)
        if self.storm_variable == 'rad':
            feature_json['geometry'][
                'coordinates'] = self.clean_consecutive_coordinates(
                    feature_json['geometry']['coordinates'])

        # format pts ADVDATE (source format '%y%m%d/%H%M' -> RFC 3339)
        if self.storm_variable == 'pts':
            feature_json['properties']['ADVDATE'] = \
                strftime_rfc3339(
                    datetime.strptime(
                        feature_json['properties']['ADVDATE'],
                        '%y%m%d/%H%M'
                    )
                )

        # Side effect: every generated document is also kept on the instance.
        self.items.append(feature_json)

        action = {
            '_id': '{}-{}-{}-{}-{}'.format(self.storm_name,
                                           self.storm_variable,
                                           file_datetime_str,
                                           self.fh,
                                           feature_json['id']),
            '_index': INDEX_NAME.format(self.storm_variable),
            '_op_type': 'update',
            'doc': feature_json,
            'doc_as_upsert': True
        }
        yield action
def test_concav(file, out):
    """Compute a concave hull per feature of *file* (in parallel) and write the hull polygons to *out*."""
    points_by_id = get_points_from_geomety(file)
    fgc_shp = ogr.Open(file)
    lyr = fgc_shp.GetLayer()
    driver = ogr.GetDriverByName('ESRI Shapefile')
    ds1 = driver.CreateDataSource(out)
    concave_lyr = ds1.CreateLayer('temp', lyr.GetSpatialRef(), ogr.wkbPolygon)
    concave_lyr = createFieldsFrom(lyr, concave_lyr)
    print("starting concave")
    # One (feature id, point array) pair per row of points_by_id.
    ps = np.array([(points_by_id.iloc[xi, 0],
                    np.array(points_by_id.iloc[xi, 1]))
                   for xi in range(0, points_by_id.shape[0])], dtype=object)
    # Process in several workers (comment translated from Portuguese).
    pool = multiprocessing.Pool(processes=multiprocessing.cpu_count())
    res = pool.map(temp, ps)
    print("End of concave calculations.")
    print("Saving to file")
    res = np.asarray(res, dtype=object)
    for i in range(0, res.shape[0]):
        try:
            fgc_id = res[i][0]
            hull = np.asarray(res[i][1])
            new_ring = ogr.Geometry(ogr.wkbLinearRing)
            # BUG FIX: the inner loop previously reused `i`, shadowing the
            # outer result index; use a distinct name for the vertex index.
            for j in range(0, hull.shape[0]):
                new_ring.AddPoint(hull[j, 0], hull[j, 1])
            poly = ogr.Geometry(ogr.wkbPolygon)
            poly.AddGeometry(new_ring)
            new_feat = ogr.Feature(concave_lyr.GetLayerDefn())
            new_feat.SetField("ID_SEQ", fgc_id)
            new_feat.SetGeometry(poly)
            concave_lyr.CreateFeature(new_feat)
            new_feat = None
        except:
            print("exception thrown!")
            traceback.print_exc()
            break
def provision(bbox: BBOX, run_id: str) -> List[str]:
    """Clip the local 'trails' layer to *bbox* and export it as a shapefile for this run."""
    run_directory = get_run_data_path(run_id, (CACHE_DIR_NAME,))
    os.makedirs(run_directory)
    datasource = ogr.GetDriverByName("GPKG").Open(get_local_features_path())
    trails_layer = datasource.GetLayerByName("trails")
    shp_path = os.path.join(run_directory, "trails.shp")
    result = ogr_to_shp(bbox, [trails_layer], shp_path, "trails", OUTPUT_CRS_CODE)
    datasource = None  # close the GeoPackage
    return result
def record_run(result_dir: str, bbox: BBOX) -> None:
    """Append *bbox* to the cumulative coverage GPKG and regenerate the KML/GeoJSON exports."""
    gpkg_path = _get_gpkg_path(result_dir)
    gpkg_datasource = GPKG_DRIVER.Open(gpkg_path, 1)  # open read-write
    if not gpkg_datasource:
        gpkg_datasource = GPKG_DRIVER.CreateDataSource(gpkg_path)
    cumulative_layer = gpkg_datasource.GetLayerByName(LAYER_NAME)
    if not cumulative_layer:
        srs = osr.SpatialReference()
        srs.SetFromUserInput("CRS:84")
        cumulative_layer = gpkg_datasource.CreateLayer(LAYER_NAME, srs, ogr.wkbPolygon)
    # Record this run's bounding box as a new polygon feature.
    feature = ogr.Feature(cumulative_layer.GetLayerDefn())
    feature.SetGeometryDirectly(ogr.CreateGeometryFromWkt(bbox.get_wkt()))
    cumulative_layer.CreateFeature(feature)
    # Re-export the cumulative layer in each secondary format (KML first,
    # then GeoJSON, matching the historical order).
    export_datasources = []
    for file_name, driver_name in (("coverage.kml", "KML"),
                                   ("coverage.geojson", "GeoJSON")):
        export_path = os.path.join(result_dir, file_name)
        if os.path.exists(export_path):
            os.remove(export_path)
        export_ds = ogr.GetDriverByName(driver_name).CreateDataSource(export_path)
        export_ds.CopyLayer(cumulative_layer, "areas")
        export_datasources.append(export_ds)
    # Drop all handles so OGR flushes everything to disk.
    cumulative_layer = None
    gpkg_datasource = None
    export_datasources = None
def remove_biggest_polygon(f1, area, out):
    """Copy every ring of *f1* except the largest one into shapefile *out*.

    :param f1: input shapefile path
    :param area: unused; kept for interface compatibility (it was previously
        clobbered by a local of the same name)
    :param out: output shapefile path

    NOTE(review): `max_index` is the sub-geometry index of the largest ring
    found across ALL features, but the second pass compares it against ring
    indices within EACH feature — with multiple features this can drop the
    wrong ring. Confirm the input is expected to hold a single feature.
    """
    print("Removing biggest polygon")
    fgc_shp = ogr.Open(f1)
    lyr = fgc_shp.GetLayer()
    driver = ogr.GetDriverByName('ESRI Shapefile')
    ds = driver.CreateDataSource(out)
    holes_lyr = ds.CreateLayer('temp', lyr.GetSpatialRef(), ogr.wkbMultiPolygon)
    # First pass: find the index of the largest ring.
    max_area = 0.0
    max_index = 0
    for feature in lyr:
        geom = feature.GetGeometryRef()
        print(geom.GetGeometryCount())
        for j in range(0, geom.GetGeometryCount()):
            ring = geom.GetGeometryRef(j)
            # BUG FIX: use a local name instead of overwriting the `area` parameter.
            ring_area = ring.GetArea()
            print(ring_area)
            if ring_area > max_area:
                max_index = j
                max_area = ring_area
    lyr = None
    fgc_shp = None
    # Second pass: re-open the input and copy every ring except the largest.
    fgc_shp2 = ogr.Open(f1)
    lyr2 = fgc_shp2.GetLayer()
    for feature in lyr2:
        geom = feature.GetGeometryRef()
        for j in range(0, geom.GetGeometryCount()):
            ring = geom.GetGeometryRef(j)
            if j != max_index:
                new_feat = ogr.Feature(holes_lyr.GetLayerDefn())
                # Buffer(0) repairs invalid ring geometry before writing.
                new_feat.SetGeometry(ring.Buffer(0))
                holes_lyr.CreateFeature(new_feat)
                new_feat = None
    # BUG FIX: release the handles actually opened in the second pass
    # (the old cleanup re-cleared lyr/fgc_shp and leaked fgc_shp2).
    holes_lyr = None
    ds = None
    lyr2 = None
    fgc_shp2 = None
def get_class_names(shp):
    """Return the sorted unique values of the 'klasa' field of *shp*.

    :param shp: path to the input shapefile
    :return: numpy array of unique class-name strings
    """
    driver_shp = ogr.GetDriverByName('ESRI Shapefile')
    # Open read-only: this function never modifies the datasource
    # (it was previously opened with update=1 for no reason).
    data = driver_shp.Open(shp, 0)
    layer = data.GetLayer()
    # Idiomatic layer iteration instead of the manual GetNextFeature loop.
    field_vals = [feature.GetFieldAsString('klasa') for feature in layer]
    return np.unique(field_vals)
def provision(bbox: BBOX, run_id: str) -> List[str]:
    """Clip the FTEN road-section lines to *bbox* and export them as a shapefile for this run."""
    run_directory = get_run_data_path(run_id, (CACHE_DIR_NAME,))
    os.makedirs(run_directory)
    shp_driver = ogr.GetDriverByName("ESRI Shapefile")
    datasource = shp_driver.Open(
        get_data_path(("FTEN_ROAD_SECTION_LINES_SVW", "FTEN_RS_LN_line.shp")))
    roads_layer = datasource.GetLayerByIndex(0)
    result = ogr_to_shp(
        bbox,
        [roads_layer],
        os.path.join(run_directory, "bc_resource_roads.shp"),
        "bc_resource_roads",
        OUTPUT_CRS_CODE,
    )
    datasource = None  # close the source shapefile
    return result
def vectorize_data():
    """Polygonize every cropped RADOLAN GeoTIFF into a shapefile with a 'rain' attribute."""
    setup_env()
    temp_path = os.getenv("TEMP_PATH")
    filelist = create_filelist()
    print("Starting vectorization...")
    for file in tqdm(filelist, unit=" file"):
        # Parse the timestamp out of the .asc file name.
        parts = file.split("/")
        timestamp = datetime.strptime(parts[len(parts) - 1], 'RW_%Y%m%d-%H%M.asc')
        stamp = timestamp.strftime("%Y%m%d-%H%M")
        filename_input = temp_path + "/cropped/{}".format(stamp)
        filename_output = temp_path + "/vectorized/{}".format(stamp)
        source = gdal.Open(filename_input + ".tif")
        band = source.GetRasterBand(1)
        _ = band.ReadAsArray()
        driver = ogr.GetDriverByName("ESRI Shapefile")
        # Overwrite any previous output for this timestamp.
        if os.path.exists(filename_output + ".shp"):
            driver.DeleteDataSource(filename_output + ".shp")
        target = driver.CreateDataSource(filename_output + ".shp")
        # RADOLAN polar stereographic projection.
        srs = osr.SpatialReference()
        srs.ImportFromProj4(
            "+proj=stere +lon_0=10.0 +lat_0=90.0 +lat_ts=60.0 +a=6370040 +b=6370040 +units=m")
        targetLayer = target.CreateLayer("radolan", srs=srs)
        targetLayer.CreateField(ogr.FieldDefn("rain", ogr.OFTInteger))
        # Burn raster values into field index 0 ('rain').
        gdal.Polygonize(band, None, targetLayer, 0, [], callback=None)
        target.Destroy()
        source = None
        _ = None
    print("Vectorization complete.")
def create_index_fld(input_shp, class_names, output_name='training_indexed.shp'):
    """
    Creates an extra field in shp with unique index value for each polygon
    input_shp - input shapefile
    class_names - list of unique class names to classify
    output_name - output shapefile name

    Returns the output shapefile file name (with a single '.shp' extension).
    """
    driver_shp = ogr.GetDriverByName('ESRI Shapefile')
    vector = driver_shp.Open(input_shp, 1)
    lyr = vector.GetLayer()
    directory_out = os.getcwd()
    # BUG FIX: the default output_name already ends with '.shp'; blindly
    # appending another '.shp' produced 'training_indexed.shp.shp'. Strip
    # any existing extension first so both 'name' and 'name.shp' work.
    base_name = os.path.splitext(output_name)[0]
    out_file = base_name + '.shp'
    # if a file with the given name exists, delete it
    if out_file in os.listdir(directory_out):
        driver_shp.DeleteDataSource(out_file)
    print('created file', out_file)
    out_ds = driver_shp.CreateDataSource(directory_out)
    lyr_copy = out_ds.CopyLayer(lyr, base_name)
    # Unique per-feature index field.
    fieldDefn = ogr.FieldDefn('indeks', ogr.OFTInteger)
    fieldDefn.SetWidth(1)
    lyr_copy.CreateField(fieldDefn)
    for nb, f in enumerate(lyr_copy):
        f.SetField('indeks', nb)
        lyr_copy.SetFeature(f)
    # Per-class code field: features are matched by their 'klasa' value.
    fieldDefn = ogr.FieldDefn('kod', ogr.OFTInteger)
    fieldDefn.SetWidth(10)
    lyr_copy.CreateField(fieldDefn)
    for code, class_name in enumerate(class_names, start=1):
        print(class_name)
        lyr_copy.SetAttributeFilter("klasa = '{0}'".format(class_name))
        for f in lyr_copy:
            f.SetField('kod', code)
            lyr_copy.SetFeature(f)
    print('created')
    return out_file
def ogr_to_shp(
    bbox: BBOX,
    src_layers: List[ogr.Layer],
    dst_path: str,
    dst_layer_name: str,
    dst_crs_code: str,
) -> List[str]:
    """Clip *src_layers* to *bbox* and write the surviving features into one shapefile layer at *dst_path*.

    NOTE(review): this block appears truncated — nothing is returned despite
    the List[str] annotation. Also, the destination layer (`gen_layer`) and
    its fields are only created when i == 0; if the FIRST source layer is
    skipped for having wkbNone geometry, `gen_layer` is referenced before
    assignment below.
    """
    gen_driver = ogr.GetDriverByName("ESRI Shapefile")
    gen_datasource = gen_driver.CreateDataSource(dst_path)
    # Destination CRS from the numeric part of e.g. "EPSG:3857".
    gen_srs = ogr.osr.SpatialReference()
    gen_srs.ImportFromEPSG(int(dst_crs_code.split(":")[-1]))
    for i, src_layer in enumerate(src_layers):
        # Layers without geometry cannot contribute shapefile features.
        if src_layer.GetGeomType() == ogr.wkbNone:
            logging.debug(
                f"Layer {src_layer.GetName()} does not contain geometries, skipping"
            )
            continue
        src_layer_srs = src_layer.GetSpatialRef()
        # Re-express the bbox in the source layer's CRS before filtering/clipping.
        clip_geometry = bbox.transform_as_geom(
            f"{src_layer_srs.GetAuthorityName(None)}:{src_layer_srs.GetAuthorityCode(None)}"
        )
        if i == 0:
            # Create the output layer once, mirroring the first layer's
            # geometry type and attribute schema.
            gen_layer = gen_datasource.CreateLayer(
                dst_layer_name, gen_srs, src_layer.GetLayerDefn().GetGeomType())
            for j in range(src_layer.GetLayerDefn().GetFieldCount()):
                field_defn = src_layer.GetLayerDefn().GetFieldDefn(j)
                gen_layer.CreateField(field_defn)
        src_layer.SetSpatialFilter(clip_geometry)
        logging.debug(
            f"Clipped src_layer to {src_layer.GetFeatureCount()} features")
        while filtered_feature := src_layer.GetNextFeature():
            contained_feature = filtered_feature.Clone()
            contained_geometry = contained_feature.GetGeometryRef(
            ).Intersection(clip_geometry)
            if contained_geometry:
                contained_geometry.AssignSpatialReference(
                    contained_feature.GetGeometryRef().GetSpatialReference()
                )  # geometry loses its spatial ref during Intersection
                contained_geometry.TransformTo(gen_srs)
                contained_feature.SetGeometryDirectly(contained_geometry)
                gen_layer.CreateFeature(contained_feature)
def get_datasource_from_bbox(bbox: BBOX, output_dir: str) -> str:
    """Ensure a GPKG in *output_dir* holds a single-feature layer with *bbox*; return the GPKG path.

    BUG FIX: the return annotation said ``None`` although the function has
    always returned the GeoPackage path.

    :param bbox: bounding box whose WKT polygon is stored
    :param output_dir: directory containing (or to contain) the GeoPackage
    :return: path to the GeoPackage file
    """
    driver = ogr.GetDriverByName("GPKG")
    gpkg_path = os.path.join(output_dir, BBOX_GPKG_NAME)
    datasource = driver.Open(gpkg_path)
    if not datasource:
        datasource = driver.CreateDataSource(gpkg_path)
    layer = datasource.GetLayerByName(BBOX_LAYER_NAME)
    if not layer:
        # Only build the SRS when the layer actually needs creating
        # (previously computed unconditionally).
        srs = osr.SpatialReference()
        srs.SetFromUserInput(bbox.crs_code)
        layer = datasource.CreateLayer(BBOX_LAYER_NAME, srs, ogr.wkbPolygon)
    if layer.GetFeatureCount() == 0:
        geometry = ogr.CreateGeometryFromWkt(bbox.get_wkt())
        feature = ogr.Feature(layer.GetLayerDefn())
        feature.SetGeometry(geometry)
        layer.CreateFeature(feature)
        feature = None
    layer, datasource = None, None  # flush and close the GPKG
    return gpkg_path
def provision(bbox: BBOX, run_id: str) -> List[str]:
    """Fetch the BC Freshwater Atlas FGDB and export its wetlands layer, clipped to *bbox*."""
    logging.info(
        "Retrieving BC Freshwater Atlas - this could take a while the first time"
    )
    fgdb = retrieve_directory("ftp.geobc.gov.bc.ca",
                              "/sections/outgoing/bmgs/FWA_Public/FWA_BC.gdb")
    logging.info("Retrieved BC Freshwater Atlas")
    run_directory = get_run_data_path(run_id, (CACHE_DIR_NAME,))
    os.makedirs(run_directory)
    datasource = ogr.GetDriverByName("OpenFileGDB").Open(fgdb)
    wetlands_layer = datasource.GetLayerByName("FWA_WETLANDS_POLY")
    result = ogr_to_shp(
        bbox,
        [wetlands_layer],
        os.path.join(run_directory, "bc_wetlands.shp"),
        "bc_wetlands",
        OUTPUT_CRS_CODE,
    )
    datasource = None  # close the FGDB
    return result
def provision(
    bbox: BBOX,
    run_id: str,
    src_layer_name: str,
    dst_layer_name: str,
) -> List[str]:
    """Clip the named layer of every cached ATES KMZ to *bbox* and merge them into one shapefile."""
    kmz_driver = ogr.GetDriverByName("LIBKML")
    kmz_pattern = get_data_path(("avcan-ates-areas-2020-06-23", "**", "*.kmz"))
    kmz_datasets = []
    for filename in glob.iglob(kmz_pattern, recursive=True):
        kmz_datasets.append(kmz_driver.Open(filename))
    run_directory = get_run_data_path(run_id, (CACHE_DIR_NAME,))
    os.makedirs(run_directory, exist_ok=True)
    src_layers = [dataset.GetLayerByName(src_layer_name) for dataset in kmz_datasets]
    result = ogr_to_shp(
        bbox,
        src_layers,
        os.path.join(run_directory, f"{dst_layer_name}.shp"),
        dst_layer_name,
        OUTPUT_CRS_CODE,
    )
    kmz_datasets = None  # close every KMZ
    return result
import os

from gdal import ogr, osr
from typing import Final, List

from app.common.bbox import BBOX

# Shared OGR GeoPackage driver and the layer name that records covered areas.
GPKG_DRIVER: Final = ogr.GetDriverByName("GPKG")
LAYER_NAME: Final = "areas"


def _get_gpkg_path(result_dir: str) -> str:
    # Location of the cumulative coverage GeoPackage for a result directory.
    return os.path.join(result_dir, "coverage.gpkg")


def has_prior_run(result_dir: str, bbox: BBOX) -> bool:
    """Return True when *bbox* exactly matches a previously recorded run (compared by WKT)."""
    wkts = [prior_run.get_wkt() for prior_run in get_prior_runs(result_dir)]
    return bbox.get_wkt() in wkts


def get_prior_runs(result_dir: str) -> List[BBOX]:
    """Load the bounding boxes of all recorded runs from the coverage GeoPackage."""
    path = _get_gpkg_path(result_dir)
    if os.path.exists(path):
        datasource = GPKG_DRIVER.Open(path, 0)
        layer = datasource.GetLayerByName(LAYER_NAME)
        runs = list()
        while area_feature := layer.GetNextFeature():
            # Envelope is (minX, maxX, minY, maxY).
            run_envelope = area_feature.GetGeometryRef().GetEnvelope()
            # NOTE(review): this block appears truncated — the BBOX(...)
            # constructor call is cut off mid-arguments.
            runs.append(
                BBOX(
                    min_x=run_envelope[0],
# NOTE(review): this chunk begins mid-statement — "rect_polygon=True)" closes a
# call whose opening is not visible here — and ends before `srs` is used.
# cur_file_path, polygons_list(_rect), image_polygons(_rect), utils, args and
# dst_dir are all defined in the part of the script that is not visible.
rect_polygon=True)
gdal_ds = gdal.Open(cur_file_path, GA_ReadOnly)
# Convert image-space polygon coordinates to georeferenced coordinates.
polygons_list = polygons_list \
    + utils.image_coords_to_geo(image_polygons,
                                gdal_ds.GetGeoTransform(),
                                gdal_ds.RasterXSize)
polygons_list_rect = polygons_list_rect \
    + utils.image_coords_to_geo(image_polygons_rect,
                                gdal_ds.GetGeoTransform(),
                                gdal_ds.RasterXSize)
# gets projection wkt of the last processed file
projection_wkt = gdal_ds.GetProjection()
mult_p = MultiPolygon(polygons_list)
mult_p_r = MultiPolygon(polygons_list_rect)
# One shapefile datasource each for polygons, rectangles and centroids,
# plus a GeoJSON datasource for centroids.
driver = ogr.GetDriverByName('ESRI Shapefile')
driver_r = ogr.GetDriverByName('ESRI Shapefile')
driver_c = ogr.GetDriverByName('ESRI Shapefile')
driver_gjc = ogr.GetDriverByName('GeoJSON')
ds = driver.CreateDataSource(os.path.join(dst_dir, "{}.shp".format(args.ln)))
ds_r = driver_r.CreateDataSource(
    os.path.join(dst_dir, "{}-rect.shp".format(args.ln)))
ds_c = driver_c.CreateDataSource(
    os.path.join(dst_dir, "{}-centroids.shp".format(args.ln)))
ds_gjc = driver_gjc.CreateDataSource(
    os.path.join(dst_dir, "{}-centroids.geojson".format(args.ln)))
source_srs = osr.SpatialReference()
source_srs.ImportFromWkt(projection_wkt)
srs = source_srs
import gdal
from gdal import osr, ogr

# NOTE(review): `csv` is used below (csv.DictReader) but never imported, and
# rasterfile/csvfile are empty placeholder paths — confirm this chunk is a
# template whose inputs are filled in elsewhere. The chunk also appears
# truncated after the DictReader line.
rasterfile = ""
csvfile = ""
# Output name derived from the CSV file name without directory or extension.
csvname = (csvfile.split('/')[-1]).split('.')[0]
# use a dictionary reader so we can access by field name
# set up the shapefile driver
driver = ogr.GetDriverByName("ESRI Shapefile")
# create the data source
data_source = driver.CreateDataSource(csvname + ".shp")
# create the spatial reference, WGS84
srs = osr.SpatialReference()
srs.ImportFromEPSG(4326)
# create the layer
layer = data_source.CreateLayer(csvname + "_Aggregations", srs, ogr.wkbPoint)
# Add the fields we're interested in
field_name = ogr.FieldDefn("Name", ogr.OFTString)
field_name.SetWidth(24)
layer.CreateField(field_name)
layer.CreateField(ogr.FieldDefn("Latitude", ogr.OFTReal))
layer.CreateField(ogr.FieldDefn("Longitude", ogr.OFTReal))
with open(csvfile, 'r') as csvf:
    reader = csv.DictReader(csvf)
def _tiger_to_tract(self, infile):
    """
    Converts collection of Census Tiger files into a geopandas.GeoDataFrame of census tracts

    Modified from original at
    https://svn.osgeo.org/gdal/tags/1.4.3/gdal/pymod/samples/tigerpoly.py
    """

    class Module(object):
        # Per-TIGER-module scratch space: line geometries keyed by TLID and
        # polygon->chain links keyed by POLY_ID.
        def __init__(mod):
            mod.lines = {}
            mod.poly_line_links = {}

    outfile = 'tracts.shp'

    # Open the datasource to operate on.
    ds = ogr.Open(infile, update=0)
    poly_layer = ds.GetLayerByName('Polygon')

    # Create output file for the composed polygons.
    nad83 = osr.SpatialReference()
    nad83.SetFromUserInput('NAD83')

    shp_driver = ogr.GetDriverByName('ESRI Shapefile')
    # NOTE(review): DeleteDataSource is called unconditionally — confirm the
    # behavior when 'tracts.shp' does not exist yet is acceptable.
    shp_driver.DeleteDataSource(outfile)
    shp_ds = shp_driver.CreateDataSource(outfile)
    shp_layer = shp_ds.CreateLayer(
        'out', geom_type=ogr.wkbPolygon, srs=nad83)

    # Mirror the Polygon layer's attribute schema onto the output layer.
    src_defn = poly_layer.GetLayerDefn()
    poly_field_count = src_defn.GetFieldCount()
    for fld_index in range(poly_field_count):
        src_fd = src_defn.GetFieldDefn(fld_index)
        fd = ogr.FieldDefn(src_fd.GetName(), src_fd.GetType())
        fd.SetWidth(src_fd.GetWidth())
        fd.SetPrecision(src_fd.GetPrecision())
        shp_layer.CreateField(fd)

    # Read all features in the line layer, holding just the geometry in a hash
    # for fast lookup by TLID.
    line_layer = ds.GetLayerByName('CompleteChain')
    line_count = 0
    modules_hash = {}
    feat = line_layer.GetNextFeature()
    geom_id_field = feat.GetFieldIndex('TLID')
    tile_ref_field = feat.GetFieldIndex('MODULE')
    while feat is not None:
        geom_id = feat.GetField(geom_id_field)
        tile_ref = feat.GetField(tile_ref_field)
        # Lazily create one Module per TIGER tile (EAFP on missing key).
        try:
            module = modules_hash[tile_ref]
        except:
            module = Module()
            modules_hash[tile_ref] = module
        module.lines[geom_id] = feat.GetGeometryRef().Clone()
        line_count = line_count + 1
        feat.Destroy()
        feat = line_layer.GetNextFeature()

    # Read all polygon/chain links and build a hash keyed by POLY_ID listing
    # the chains (by TLID) attached to it.
    link_layer = ds.GetLayerByName('PolyChainLink')
    feat = link_layer.GetNextFeature()
    geom_id_field = feat.GetFieldIndex('TLID')
    tile_ref_field = feat.GetFieldIndex('MODULE')
    lpoly_field = feat.GetFieldIndex('POLYIDL')
    rpoly_field = feat.GetFieldIndex('POLYIDR')
    link_count = 0
    while feat is not None:
        module = modules_hash[feat.GetField(tile_ref_field)]
        tlid = feat.GetField(geom_id_field)
        lpoly_id = feat.GetField(lpoly_field)
        rpoly_id = feat.GetField(rpoly_field)
        # A chain with the same polygon on both sides is interior; skip it.
        if lpoly_id == rpoly_id:
            feat.Destroy()
            feat = link_layer.GetNextFeature()
            continue
        try:
            module.poly_line_links[lpoly_id].append(tlid)
        except:
            module.poly_line_links[lpoly_id] = [tlid]
        try:
            module.poly_line_links[rpoly_id].append(tlid)
        except:
            module.poly_line_links[rpoly_id] = [tlid]
        link_count = link_count + 1
        feat.Destroy()
        feat = link_layer.GetNextFeature()

    # Process all polygon features.
    feat = poly_layer.GetNextFeature()
    tile_ref_field = feat.GetFieldIndex('MODULE')
    polyid_field = feat.GetFieldIndex('POLYID')
    degenerate_count = 0
    while feat is not None:
        module = modules_hash[feat.GetField(tile_ref_field)]
        polyid = feat.GetField(polyid_field)
        tlid_list = module.poly_line_links[polyid]
        # Collect this polygon's boundary chains and stitch them into a ring.
        link_coll = ogr.Geometry(type=ogr.wkbGeometryCollection)
        for tlid in tlid_list:
            geom = module.lines[tlid]
            link_coll.AddGeometry(geom)
        try:
            poly = ogr.BuildPolygonFromEdges(link_coll)
            # Discard degenerate rings (fewer than 4 points incl. closure).
            if poly.GetGeometryRef(0).GetPointCount() < 4:
                degenerate_count = degenerate_count + 1
                poly.Destroy()
                feat.Destroy()
                feat = poly_layer.GetNextFeature()
                continue
            feat2 = ogr.Feature(feature_def=shp_layer.GetLayerDefn())
            for fld_index in range(poly_field_count):
                feat2.SetField(fld_index, feat.GetField(fld_index))
            feat2.SetGeometryDirectly(poly)
            shp_layer.CreateFeature(feat2)
            feat2.Destroy()
        except:
            warn('BuildPolygonFromEdges failed.')
        feat.Destroy()
        feat = poly_layer.GetNextFeature()
    if degenerate_count:
        warn('Discarded %d degenerate polygons.'
             % degenerate_count)

    # Cleanup
    shp_ds.Destroy()
    shp_ds = None
    ds.Destroy()
    ds = None

    # build a fully-qualified fips code and dissolve on it to create tract geographies
    gdf = gpd.read_file(outfile)
    if "CTBNA90" in gdf.columns:
        gdf = gdf.rename(columns={"CTBNA90": 'TRACT', "BLK90": "BLOCK"})
    # Zero-pad each FIPS component to its fixed width.
    gdf['STATE'] = gdf['STATE'].astype(str).str.rjust(2, "0")
    gdf['COUNTY'] = gdf['COUNTY'].astype(str).str.rjust(3, "0")
    gdf['TRACT'] = gdf['TRACT'].astype(str).str.rjust(6, "0")
    gdf['BLOCK'] = gdf['BLOCK'].astype(str).str.rjust(4, "0")
    gdf['fips'] = gdf.STATE + gdf.COUNTY + gdf.TRACT
    if self.geom == 'block':
        gdf['fips'] += gdf.BLOCK
    gdf = gdf.dropna(subset=['fips'])
    # buffer(0) repairs invalid geometry before dissolving.
    gdf.geometry = gdf.buffer(0)
    gdf = gdf.dissolve(by='fips')
    gdf.reset_index(inplace=True)
    # Remove the intermediate shapefile once the GeoDataFrame is built.
    shp_driver.DeleteDataSource(outfile)
    return gdf
import owslib.wps
import owslib.etree
from gdal import ogr

# Demo: serialize a GML multipolygon feature collection to a temp file,
# then read it back with OGR and print each geometry.
gml = owslib.wps.GMLMultiPolygonFeatureCollection(
    [[(-102.8184, 39.5273), (-102.8184, 37.418), (-101.2363, 37.418),
      (-101.2363, 39.5273), (-102.8184, 39.5273)]])
# FIX: use a context manager so the temp file is closed even if
# etree.tostring() raises (previously a manual open/write/close).
with open("/tmp/gml", 'wb') as tmp:
    tmp.write(owslib.etree.etree.tostring(gml.getXml()))
drv = ogr.GetDriverByName("GML")
ds = drv.Open(tmp.name, 0)  # read-only
lyr = ds.GetLayer()
for f in lyr:
    print(f.GetGeometryRef())
ds.Destroy()
# set the folder where your data exists nowDatPath = pathlib.Path("Downloads/Now") # set folder for stacked tiff export if not os.path.exists(os.path.join(tiffPath, "Now")): os.makedirs(os.path.join(tiffPath, "Now")) nowPath = os.path.join(tiffPath, "Now") stack20ms2tiff(nowDatPath, nowPath) # clip to area of interest (aoi) and create mosaic----------------------- # if folder contains more than one tile, mosaic them --------------------- VectorFormat = 'ESRI Shapefile' VectorDriver = ogr.GetDriverByName(VectorFormat) aoiVectorDataset = VectorDriver.Open(os.path.join(cwd, aoiPath), 0) # 0=Read-only, 1=Read-Write # Check to see if shapefile is found. if aoiVectorDataset is None: print("---------------------------------------------") print('Could not open ' + str(aoiPath)) else: print("---------------------------------------------") print('Opened ' + str(aoiPath)) layer = aoiVectorDataset.GetLayer() # create a folder for mosaicked clipped TIFF products if not os.path.exists("Clipped-Mos"): os.makedirs("Clipped-Mos") tiffPath = pathlib.Path("Clipped-Mos")
def difference(f1, f2, out):
    """Write the geometric difference union(f1) - union(f2) to shapefile *out*.

    Both input layers are dissolved into single multipolygons first, so
    attribute fields are NOT propagated to the output.

    :param f1: shapefile whose area is kept
    :param f2: shapefile whose area is subtracted
    :param out: output shapefile path
    """
    try:
        print("Difference between:")
        print(" ", f1)
        print(" ", f2)
        fgc_shp = ogr.Open(f1)
        lyr1 = fgc_shp.GetLayer()
        fgc_shp2 = ogr.Open(f2)
        lyr2 = fgc_shp2.GetLayer()
        driver = ogr.GetDriverByName('ESRI Shapefile')
        ds = driver.CreateDataSource(out)
        diff_lyr = ds.CreateLayer('temp', lyr2.GetSpatialRef(), ogr.wkbMultiPolygon)
        # Dissolve every geometry of the first layer into one multipolygon.
        union1 = ogr.Geometry(ogr.wkbMultiPolygon)
        for feat1 in lyr1:
            geom1 = feat1.GetGeometryRef()
            if geom1 is not None:
                union1 = union1.Union(geom1)
            geom1 = None
        # Same for the second layer.
        union2 = ogr.Geometry(ogr.wkbMultiPolygon)
        for feat2 in lyr2:
            geom2 = feat2.GetGeometryRef()
            if geom2 is not None:
                union2 = union2.Union(geom2)
            geom2 = None
        # Buffer(0) repairs self-intersections before differencing.
        union1 = union1.Buffer(0)
        union2 = union2.Buffer(0)
        print(union1.GetGeometryCount())
        diff = union1.Difference(union2)
        new_feat = ogr.Feature(diff_lyr.GetLayerDefn())
        new_feat.SetGeometry(diff)
        diff_lyr.CreateFeature(new_feat)
        # (Removed a large block of commented-out per-feature
        # SymmetricDifference code superseded by the union/difference above.)
    except:
        print("exception thrown!")
        traceback.print_exc()
    # Drop every handle so OGR flushes the output datasource.
    new_feat = None
    union1 = None
    union2 = None
    diff_lyr = None
    fgc_shp = None
    fgc_shp2 = None
    ds = None
    lyr1 = None
    lyr2 = None