def geo_to_img_coords(img_filename, x, y):
    """Convert geographic (EPSG:4326 lon/lat) coordinates to pixel coordinates
    of the raster stored at ``img_filename``."""
    dataset = gdal.Open(img_filename)

    # Build a coordinate transformation from WGS84 into the raster's CRS.
    raster_ref = osr.SpatialReference()
    raster_ref.ImportFromWkt(dataset.GetProjection())
    wgs84_ref = osr.SpatialReference()
    wgs84_ref.ImportFromEPSG(4326)
    to_raster = osr.CoordinateTransformation(wgs84_ref, raster_ref)
    geo_x, geo_y, _ = to_raster.TransformPoint(x, y, 0)

    # Invert the (north-up) geotransform: pixel = (geo - origin) / pixel_size.
    gt = dataset.GetGeoTransform()
    px = (geo_x - gt[0]) / gt[1]
    py = (geo_y - gt[3]) / gt[5]
    return px, py
def get_degree_transform(dataset): """ Gets a GDAL transform to convert coordinate latitudes and longitudes to northings and eastings associated with the NAD 1983 projection. """ # get the old coordinate system old = osr.SpatialReference() old.ImportFromWkt(dataset.GetProjection()) # create the new coordinate system nad83_wkt = ( """ GEOGCS["NAD83", DATUM["North_American_Datum_1983", SPHEROID["GRS 1980",6378137,298.257222101, AUTHORITY["EPSG","7019"]], AUTHORITY["EPSG","6269"]], PRIMEM["Greenwich",0, AUTHORITY["EPSG","8901"]], UNIT["degree",0.0174532925199433, AUTHORITY["EPSG","9108"]], AUTHORITY["EPSG","4269"]] """ ) new = osr.SpatialReference() new.ImportFromWkt(nad83_wkt) # create a transform object to convert between coordinate systems transform = osr.CoordinateTransformation(old, new) return transform
def reproject_point(geom, target_obj):
    '''
    Reproject a point geometry (in place) into the SRS of target_obj.

    :param geom: ogr.Geometry - geometry to reproject; it is mutated by
        Transform() and also returned for convenience
    :param target_obj: ogr.DataSource, ogr.Layer, ogr.Geometry, gdal.Dataset
        NOTE(review): the docstring previously also listed ogr.Feature, but
        the type dispatch below never handled it - confirm whether Feature
        support is needed.
    :return: ogr.Geometry in srs of target_obj
    :raises TypeError: if target_obj is not one of the supported types
    '''
    import ogr, osr
    import gdal
    from geotools.raster import get_srs_ras

    src_prj = get_srs_vec(geom)
    if isinstance(target_obj, (ogr.DataSource, ogr.Layer, ogr.Geometry)):
        target_prj = get_srs_vec(target_obj)
    elif isinstance(target_obj, gdal.Dataset):
        target_prj = get_srs_ras(target_obj)
    else:
        # Fail fast with a clear message instead of the UnboundLocalError
        # that the fall-through used to produce.
        raise TypeError("Unsupported target_obj type: %r" % type(target_obj))

    coordTrans = osr.CoordinateTransformation(src_prj, target_prj)
    # transform point in place
    geom.Transform(coordTrans)
    return (geom)
def __init__(self, geoImg, outputDir):
    """Prepare GeoJSON outputs (full-data and patches layers) under outputDir,
    with all geometry in EPSG:3857 so area/length/width are in meters."""
    self.geoImg = geoImg
    self.outputDir = outputDir

    # convert everything to meters for clearer area, length, width operations
    web_mercator = osr.SpatialReference()
    web_mercator.ImportFromEPSG(3857)
    self.projTransform = osr.CoordinateTransformation(
        self.geoImg.tifProjection, web_mercator)

    geojson_driver = ogr.GetDriverByName("GeoJSON")

    # Full-data output: recreate from scratch on every run.
    self.fullDataGeojsonPath = os.path.join(outputDir, 'fullData.geojson')
    if os.path.exists(self.fullDataGeojsonPath):
        os.remove(self.fullDataGeojsonPath)
    self.fullDataSource = geojson_driver.CreateDataSource(self.fullDataGeojsonPath)
    self.fullDataLayer = self.fullDataSource.CreateLayer(
        "payload", web_mercator, ogr.wkbPolygon)

    # Attribute schema: two fixed-width strings, then numeric fields.
    for string_field in ("Label", "ObjId"):
        str_defn = ogr.FieldDefn(string_field, ogr.OFTString)
        str_defn.SetWidth(24)
        self.fullDataLayer.CreateField(str_defn)
    self.fullDataLayer.CreateField(ogr.FieldDefn("Area", ogr.OFTInteger))
    self.fullDataLayer.CreateField(ogr.FieldDefn("ClassId", ogr.OFTInteger))
    self.fullDataLayer.CreateField(ogr.FieldDefn("Score", ogr.OFTReal))
    self.fullDataLayer.CreateField(ogr.FieldDefn("is_partial", ogr.OFTInteger))

    # Paths for downstream cleaned outputs (created elsewhere).
    self.cleanedGeojsonPath = os.path.join(outputDir, 'cleanData.geojson')
    self.cleanedBboxGeojsonPath = os.path.join(outputDir, 'cleanData_bbox.geojson')

    # Patches output: also recreated from scratch.
    self.patchesGeojsonPath = os.path.join(outputDir, 'patchesData.geojson')
    if os.path.exists(self.patchesGeojsonPath):
        os.remove(self.patchesGeojsonPath)
    self.patchesSource = geojson_driver.CreateDataSource(self.patchesGeojsonPath)
    self.patchesLayer = self.patchesSource.CreateLayer(
        "payload", web_mercator, ogr.wkbPolygon)
def extract_point_from_raster(point, data_source, band_number=1):
    """Return floating-point value that corresponds to given point.

    :param point: ogr.Geometry point (any SRS; reprojected in place)
    :param data_source: gdal.Dataset raster to sample
    :param band_number: 1-based raster band index
    :return: band value at the point as float, or NaN for the nodata value
    """
    # Convert point co-ordinates so that they are in same projection as raster
    point_sr = point.GetSpatialReference()
    raster_sr = osr.SpatialReference()
    raster_sr.ImportFromWkt(data_source.GetProjection())
    transform = osr.CoordinateTransformation(point_sr, raster_sr)
    point.Transform(transform)

    # Convert geographic co-ordinates to pixel co-ordinates.
    # FIX: GetX()/GetY() take an optional vertex *index*; the previous
    # hard-coded coordinate arguments (-86.77..., 36.16...) were debug
    # leftovers and broke sampling for any other point.
    x, y = point.GetX(), point.GetY()
    forward_transform = Affine.from_gdal(*data_source.GetGeoTransform())
    reverse_transform = ~forward_transform
    px, py = reverse_transform * (x, y)
    # Round to the nearest pixel center.
    px, py = int(px + 0.5), int(py + 0.5)

    # Extract pixel value
    band = data_source.GetRasterBand(band_number)
    structval = band.ReadRaster(px, py, 1, 1, buf_type=gdal.GDT_Float32)
    result = struct.unpack('f', structval)[0]
    if result == band.GetNoDataValue():
        result = float('nan')
    return result
def utm_2_wgs84(zone, easting, northing):
    '''
    utm_2_wgs84() - Reproject from UTM to geographic coordinates

    Parameters
    ----------
    zone : str
        UTM Zone

    easting : float
        Easting in UTM

    northing : float
        Notrhing in UTM

    Return
    ------
    (lon, lat, altitude)
    '''
    import osr as osr

    # UTM source CRS on the WGS84 ellipsoid; hemisphere from northing sign.
    utm_srs = osr.SpatialReference()
    utm_srs.SetWellKnownGeogCS('WGS84')
    utm_srs.SetUTM(zone, northing > 0)

    # Target CRS is the geographic part of the same datum.
    geographic_srs = utm_srs.CloneGeogCS()

    converter = osr.CoordinateTransformation(utm_srs, geographic_srs)
    return converter.TransformPoint(easting, northing, 0)
def latLonsToPixel(latLonPairs):
    """Convert [lat, lon] pairs to [x, y] pixel coordinates of the module-level
    dataset ``ds``. NOTE: each input pair is mutated in place with the
    projected coordinates as a side effect (preserved from the original)."""
    # Geotransform components of the target raster.
    gt = ds.GetGeoTransform()
    x_origin, pixel_w = gt[0], gt[1]
    y_origin, pixel_h = gt[3], gt[5]

    # Transform from the dataset's geographic CRS into its projected CRS.
    srs = osr.SpatialReference()
    srs.ImportFromWkt(ds.GetProjection())
    to_projected = osr.CoordinateTransformation(srs.CloneGeogCS(), srs)

    pixelPairs = []
    for point in latLonPairs:
        # Project (lon, lat) -> (X, Y); the results overwrite the pair's slots.
        point[1], point[0], _ = to_projected.TransformPoint(point[1], point[0])
        col = (point[1] - x_origin) / pixel_w
        row = (point[0] - y_origin) / pixel_h
        pixelPairs.append([int(col), int(row)])
    return pixelPairs
def get_grid_convergence(lon, lat, map_crs):
    """
    Get grid convergence angles.

    Arguments:
        lon: list of floats
            Longitude. West: negative; East: positive.
        lat: list of floats
            Latitde. North: positive; South: negative.
        map_crs: osr.SpatialReference
            Projected CRS for which convergence is computed.
    Reuturns:
        grid_convergence: array of floats
            Grid convergence in degrees.
    """
    lon, lat = np.array(lon), np.array(lat)
    if map_crs.GetAttrValue('PROJECTION').lower() == 'transverse_mercator':
        # Closed-form convergence for transverse Mercator:
        # gamma = arctan(tan(lon - lon0) * sin(lat))
        lon0 = map_crs.GetProjParm('central_meridian')
        lon = np.deg2rad(lon)
        lat = np.deg2rad(lat)
        lon0 = np.deg2rad(lon0)
        grid_convergence = np.arctan(np.tan(lon - lon0) * np.sin(lat))
        grid_convergence = np.rad2deg(grid_convergence)
    else:
        # Generic numeric estimate: project each point and a point nudged
        # slightly north, then measure the angle of the resulting vector
        # from grid north.
        delta_lat = 1e-4
        lon_lat_0 = np.array([lon, lat]).T
        lon_lat_1 = np.array([lon, lat + delta_lat]).T
        wgs84_crs = define_wgs84_crs()
        transform = osr.CoordinateTransformation(wgs84_crs, map_crs)
        xy0 = np.array(transform.TransformPoints(lon_lat_0))
        xy1 = np.array(transform.TransformPoints(lon_lat_1))
        dx = xy1[:, 0] - xy0[:, 0]
        dy = xy1[:, 1] - xy0[:, 1]
        grid_convergence = np.abs(
            np.rad2deg(np.arcsin(dx / np.sqrt(dx**2 + dy**2))))
        # Sign convention: negative where dx and dy have the same sign.
        index = dx * dy > 0
        grid_convergence[index] = -grid_convergence[index]
    return grid_convergence
def convertShpToExtend(pathToShp):
    """
    Reproject a shapefile's extent to WGS84 and return it.

    :param pathToShp: path to an ESRI shapefile
    :return: [north, west, south, east] i.e.
        [max_lat, min_lon, min_lat, max_lon] in WGS84 degrees
    """
    driver = ogr.GetDriverByName('ESRI Shapefile')
    dataset = driver.Open(pathToShp)
    if dataset is not None:
        layer = dataset.GetLayer()
        # Use the SRS of the first feature's geometry.
        # (The previous version also read layer.GetSpatialRef() into the same
        # variable and immediately overwrote it - dead code, removed.)
        feature = layer.GetNextFeature()
        geom = feature.GetGeometryRef()
        spatialRef = geom.GetSpatialReference()

        # WGS84 target
        outSpatialRef = osr.SpatialReference()
        outSpatialRef.ImportFromEPSG(4326)
        coordTrans = osr.CoordinateTransformation(spatialRef, outSpatialRef)

        # Envelope is (minX, maxX, minY, maxY); transform its two corners.
        env = geom.GetEnvelope()
        pointMAX = ogr.Geometry(ogr.wkbPoint)
        pointMAX.AddPoint(env[1], env[3])
        pointMAX.Transform(coordTrans)

        pointMIN = ogr.Geometry(ogr.wkbPoint)
        pointMIN.AddPoint(env[0], env[2])
        pointMIN.Transform(coordTrans)

        return [pointMAX.GetPoint()[1], pointMIN.GetPoint()[0],
                pointMIN.GetPoint()[1], pointMAX.GetPoint()[0]]
    else:
        # NOTE(review): uses the site builtin exit(); sys.exit() would be the
        # conventional choice, but behavior is kept as-is for callers.
        exit(" shapefile not found. Please verify your path to the shapefile")
def transform_coordinates(pointX, pointY, iEPSG, oEPSG):
    """Transform a coordinate pair from EPSG:iEPSG into EPSG:oEPSG and
    return the transformed (x, y)."""
    # Build the transformation between the two EPSG-defined systems.
    src_ref = osr.SpatialReference()
    src_ref.ImportFromEPSG(iEPSG)
    dst_ref = osr.SpatialReference()
    dst_ref.ImportFromEPSG(oEPSG)
    reprojection = osr.CoordinateTransformation(src_ref, dst_ref)

    # Wrap the coordinates in a point geometry and reproject it.
    pt = ogr.Geometry(ogr.wkbPoint)
    pt.AddPoint(pointX, pointY)
    pt.Transform(reprojection)
    return pt.GetX(), pt.GetY()
def osr_ct_4():
    """Verify TransformPoints() maps lat/lon tuples (with and without z)
    into UTM zone 11 correctly."""
    if gdaltest.have_proj4 == 0:
        return 'skip'

    utm_srs = osr.SpatialReference()
    utm_srs.SetUTM(11)
    utm_srs.SetWellKnownGeogCS('WGS84')

    ll_srs = osr.SpatialReference()
    ll_srs.SetWellKnownGeogCS('WGS84')

    gdaltest.ct = osr.CoordinateTransformation(ll_srs, utm_srs)

    # Same point twice: once as a 3-tuple, once as a 2-tuple.
    result = gdaltest.ct.TransformPoints([(-117.5, 32.0, 0.0),
                                          (-117.5, 32.0)])

    expected = (452772.06, 3540544.89, 0.0)
    for got in result:
        if any(abs(got[axis] - expected[axis]) > 0.01 for axis in range(3)):
            gdaltest.post_reason('Wrong LL to UTM result')
            return 'fail'

    return 'success'
def get_image_wkt(product):
    """Return the WKT of the image footprint box in EPSG:4326.

    :param product: path to a GDAL-readable raster
    :return: WKT string of the box built from the transformed lower-left and
        upper-right corners
    """
    src = gdal.Open(product)
    ulx, xres, xskew, uly, yskew, yres = src.GetGeoTransform()

    # Corner coordinates in the image CRS (assumes a north-up geotransform,
    # so yres is negative and min_y is at the bottom edge).
    min_x = ulx
    max_x = ulx + (src.RasterXSize * xres)
    max_y = uly
    min_y = uly + (src.RasterYSize * yres)

    source = osr.SpatialReference()
    source.ImportFromWkt(src.GetProjection())
    target = osr.SpatialReference()
    target.ImportFromEPSG(4326)
    transform = osr.CoordinateTransformation(source, target)

    # Transform each corner once (previously each TransformPoint call was
    # duplicated to pull out x and y separately).
    lower_left = transform.TransformPoint(min_x, min_y)
    upper_right = transform.TransformPoint(max_x, max_y)
    result_wkt = box(lower_left[0], lower_left[1],
                     upper_right[0], upper_right[1]).wkt
    return result_wkt
def HandleTile(t, shp, dstdir, csvpath, args, exclude_list):
    """Select the images from shapefile `shp` that contribute to tile `t` and
    write their source/ortho path lists to text files under `dstdir`.

    :param t: tile object with .name, .epsg and .geom (in the tile SRS)
    :param shp: path to the footprint shapefile of candidate images
    :param dstdir: output directory for the *_orig.txt / *_ortho.txt lists
    :param csvpath: path whose basename (minus extension) prefixes outputs
    :param args: parsed CLI namespace (overwrite, online_only, nosort,
        build_shp, stretch, ...)
    :param exclude_list: scene ids to skip
    """
    otxtpath = os.path.join(
        dstdir,
        "%s_%s_orig.txt" % (os.path.basename(csvpath)[:-4], t.name))
    mtxtpath = os.path.join(
        dstdir,
        "%s_%s_ortho.txt" % (os.path.basename(csvpath)[:-4], t.name))

    # Skip tiles whose outputs already exist unless --overwrite was given.
    if os.path.isfile(otxtpath) and os.path.isfile(
            mtxtpath) and args.overwrite is False:
        logger.info("Tile %s processing files already exist" % t.name)
    else:
        logger.debug("Tile %s" % (t.name))
        t_srs = osr.SpatialReference()
        t_srs.ImportFromEPSG(t.epsg)

        #### Open Shp
        shpd, shpn = os.path.split(shp)
        shpbn, shpe = os.path.splitext(shpn)
        ds = ogr.Open(shp)
        if ds is None:
            logger.warning("Open failed")
        else:
            lyr = ds.GetLayerByName(shpbn)
            #### attribute filter for online images
            if args.online_only:
                lyr.SetAttributeFilter('STATUS = "online"')
            s_srs = lyr.GetSpatialRef()
            #logger.debug(str(s_srs))
            logger.debug(str(t.geom))
            # Build both directions of the transform only when SRSs differ;
            # ict (tile->source) is created but only ct is used below.
            if not t_srs.IsSame(s_srs):
                ict = osr.CoordinateTransformation(t_srs, s_srs)
                ct = osr.CoordinateTransformation(s_srs, t_srs)

            # Collect footprints that intersect the tile geometry.
            lyr.ResetReading()
            feat = lyr.GetNextFeature()
            imginfo_list1 = []
            while feat:
                iinfo = ImageInfo(feat, "RECORD", srs=s_srs)
                if iinfo.geom is not None and iinfo.geom.GetGeometryType(
                ) == ogr.wkbPolygon:
                    if not t_srs.IsSame(s_srs):
                        iinfo.geom.Transform(ct)
                    if iinfo.geom.Intersect(t.geom):
                        if iinfo.scene_id in exclude_list:
                            logger.debug(
                                "Scene in exclude list, excluding: %s" %
                                iinfo.srcfp)
                        elif args.online_only and not os.path.isfile(
                                iinfo.srcfp):
                            logger.warning(
                                "Scene does not exist, excluding: {0}".format(
                                    iinfo.srcfp))
                        else:
                            logger.debug(
                                "Intersect %s, %s: %s" %
                                (iinfo.scene_id, iinfo.srcfp,
                                 str(iinfo.geom)))
                            imginfo_list1.append(iinfo)
                feat = lyr.GetNextFeature()
            ds = None
            logger.info("Number of intersects in tile %s: %i" %
                        (t.name, len(imginfo_list1)))

            if len(imginfo_list1) > 0:
                if args.nosort is False:
                    #### Get mosaic parameters
                    logger.debug("Getting mosaic parameters")
                    params = getMosaicParameters(imginfo_list1[0], args)

                    #### Remove images that do not match ref
                    logger.debug("Setting image pattern filter")
                    imginfo_list2 = filterMatchingImages(imginfo_list1, params)
                    logger.info("Number of images matching filter: %i" %
                                (len(imginfo_list2)))

                    #### Sort by quality (lowest score first; best last)
                    logger.debug("Sorting images by quality")
                    imginfo_list3 = []
                    for iinfo in imginfo_list2:
                        iinfo.getScore(params)
                        if iinfo.score > 0:
                            imginfo_list3.append(iinfo)
                    imginfo_list3.sort(key=lambda x: x.score)

                    if args.build_shp:
                        #######################################################
                        #### Create Shp of the selected footprints + scores
                        shp = os.path.join(
                            dstdir, "%s_%s_imagery.shp" %
                            (os.path.basename(csvpath)[:-4], t.name))
                        logger.debug("Creating shapefile of geoms: %s" % shp)
                        fields = [("IMAGENAME", ogr.OFTString, 100),
                                  ("SCORE", ogr.OFTReal, 0)]
                        OGR_DRIVER = "ESRI Shapefile"
                        ogrDriver = ogr.GetDriverByName(OGR_DRIVER)
                        if ogrDriver is None:
                            logger.debug("OGR: Driver %s is not available" %
                                         OGR_DRIVER)
                            sys.exit(-1)
                        if os.path.isfile(shp):
                            ogrDriver.DeleteDataSource(shp)
                        vds = ogrDriver.CreateDataSource(shp)
                        if vds is None:
                            logger.debug("Could not create shp")
                            sys.exit(-1)
                        shpd, shpn = os.path.split(shp)
                        shpbn, shpe = os.path.splitext(shpn)
                        lyr = vds.CreateLayer(shpbn, t_srs, ogr.wkbPolygon)
                        if lyr is None:
                            logger.debug("ERROR: Failed to create layer: %s" %
                                         shpbn)
                            sys.exit(-1)
                        for fld, fdef, flen in fields:
                            field_defn = ogr.FieldDefn(fld, fdef)
                            if fdef == ogr.OFTString:
                                field_defn.SetWidth(flen)
                            if lyr.CreateField(field_defn) != 0:
                                logger.debug(
                                    "ERROR: Failed to create field: %s" % fld)
                        for iinfo in imginfo_list3:
                            logger.debug("Image: %s" % (iinfo.srcfn))
                            feat = ogr.Feature(lyr.GetLayerDefn())
                            feat.SetField("IMAGENAME", iinfo.srcfn)
                            feat.SetField("SCORE", iinfo.score)
                            feat.SetGeometry(iinfo.geom)
                            if lyr.CreateFeature(feat) != 0:
                                logger.debug(
                                    "ERROR: Could not create feature for image %s"
                                    % iinfo.srcfn)
                            else:
                                logger.debug("Created feature for image: %s" %
                                             iinfo.srcfn)
                            feat.Destroy()

                    #### Overlay geoms and remove non-contributors:
                    # each image's footprint is differenced against all
                    # higher-scoring footprints; only what remains within the
                    # tile counts as a contribution.
                    logger.debug(
                        "Overlaying images to determine contributors")
                    contribs = []
                    for i in xrange(0, len(imginfo_list3)):
                        iinfo = imginfo_list3[i]
                        basegeom = iinfo.geom
                        for j in range(i + 1, len(imginfo_list3)):
                            iinfo2 = imginfo_list3[j]
                            geom2 = iinfo2.geom
                            if basegeom.Intersects(geom2):
                                basegeom = basegeom.Difference(geom2)
                                if basegeom is None or basegeom.IsEmpty():
                                    #logger.debug("Broke after %i comparisons" %j)
                                    break
                        if basegeom is None:
                            logger.debug("Function Error: %s" % iinfo.srcfp)
                        elif basegeom.IsEmpty():
                            logger.debug(
                                "Removing non-contributing image: %s" %
                                iinfo.srcfp)
                        else:
                            basegeom = basegeom.Intersection(t.geom)
                            if basegeom is None:
                                logger.debug("Function Error: %s" %
                                             iinfo.srcfp)
                            elif basegeom.IsEmpty():
                                logger.debug(
                                    "Removing non-contributing image: %s" %
                                    iinfo.srcfp)
                            else:
                                contribs.append(iinfo.srcfp)

                elif args.nosort is True:
                    # NOTE(review): `image_list` is not defined in this
                    # function - presumably a module-level name; verify, as
                    # this branch would otherwise raise NameError.
                    contribs = image_list

                logger.info("Number of contributing images: %i" %
                            (len(contribs)))
                if len(contribs) > 0:
                    #### Write textfiles with original and ortho'd paths
                    if not os.path.isdir(dstdir):
                        os.makedirs(dstdir)
                    otxtpath = os.path.join(
                        dstdir, "%s_%s_orig.txt" %
                        (os.path.basename(csvpath)[:-4], t.name))
                    mtxtpath = os.path.join(
                        dstdir, "%s_%s_ortho.txt" %
                        (os.path.basename(csvpath)[:-4], t.name))
                    otxt = open(otxtpath, 'w')
                    mtxt = open(mtxtpath, 'w')
                    for contrib in contribs:
                        if not os.path.isfile(contrib):
                            logger.warning("Image does not exist: %s" %
                                           (contrib))
                        otxt.write("%s\n" % contrib)
                        m_fn = "{0}_u08{1}{2}.tif".format(
                            os.path.splitext(os.path.basename(contrib))[0],
                            args.stretch, t.epsg)
                        mtxt.write(
                            os.path.join(dstdir, 'orthos', t.name, m_fn) +
                            "\n")
                    # NOTE(review): mtxt is never closed here (only otxt is) -
                    # confirm whether that is intentional.
                    otxt.close()
def inverse_coordinate_transformation(self):
    """Return (building and caching on first use) the transformation from
    the geographic coordinate system into this object's spatial reference."""
    cached = getattr(self, '_ict', None)
    if not cached:
        # Lazily construct and memoize on the instance.
        self._ict = osr.CoordinateTransformation(
            self.geospatial_coordinate_system, self.spatial_reference)
    return self._ict
def HandleTile(t, src, dstdir, csvpath, args, exclude_list):
    """Select the images from footprint source `src` that contribute to tile
    `t` and write their source/ortho path lists to text files under `dstdir`.

    :param t: tile object with .name, .epsg and .geom (in the tile SRS)
    :param src: footprint dataset path (dataset[:layer] resolved by utils)
    :param dstdir: output directory for the *_orig.txt / *_ortho.txt lists
    :param csvpath: path whose basename (minus extension) prefixes outputs
    :param args: parsed CLI namespace (overwrite, require_pan, nosort,
        build_shp, mosaic, stretch, min_contribution_area, ...)
    :param exclude_list: scene ids to skip
    """
    otxtpath = os.path.join(
        dstdir,
        "{}_{}_orig.txt".format(os.path.basename(csvpath)[:-4], t.name))
    mtxtpath = os.path.join(
        dstdir,
        "{}_{}_ortho.txt".format(os.path.basename(csvpath)[:-4], t.name))

    # Skip tiles whose outputs already exist unless --overwrite was given.
    if os.path.isfile(otxtpath) and os.path.isfile(
            mtxtpath) and args.overwrite is False:
        logger.info("Tile %s processing files already exist", t.name)
    else:
        logger.info("Tile %s", t.name)
        t_srs = osr.SpatialReference()
        t_srs.ImportFromEPSG(t.epsg)

        #### Open mfp
        dsp, lyrn = utils.get_source_names(src)
        ds = ogr.Open(dsp)
        if ds is None:
            logger.error("Open failed")
        else:
            lyr = ds.GetLayerByName(lyrn)
            if not lyr:
                raise RuntimeError(
                    "Layer {} does not exist in dataset {}".format(lyrn, dsp))
            else:
                s_srs = lyr.GetSpatialRef()
                #logger.debug(str(s_srs))
                #logger.debug(str(t.geom))

                # Reproject the tile geometry into the source SRS so it can be
                # used as a spatial filter on the footprint layer.
                tile_geom_in_s_srs = t.geom.Clone()
                if not t_srs.IsSame(s_srs):
                    ict = osr.CoordinateTransformation(t_srs, s_srs)
                    ct = osr.CoordinateTransformation(s_srs, t_srs)
                    tile_geom_in_s_srs.Transform(ict)

                # if the geometry crosses meridian, split it into multipolygon
                # (else this breaks SetSpatialFilter)
                if utils.doesCross180(tile_geom_in_s_srs):
                    logger.debug(
                        "tile_geom_in_s_srs crosses 180 meridian; splitting to multiple polygons..."
                    )
                    tile_geom_in_s_srs = utils.getWrappedGeometry(
                        tile_geom_in_s_srs)

                lyr.ResetReading()
                lyr.SetSpatialFilter(tile_geom_in_s_srs)
                feat = lyr.GetNextFeature()
                imginfo_list1 = []
                while feat:
                    iinfo = mosaic.ImageInfo(feat, "RECORD", srs=s_srs)
                    if iinfo.geom is not None and iinfo.geom.GetGeometryType(
                    ) in (ogr.wkbPolygon, ogr.wkbMultiPolygon):
                        if not t_srs.IsSame(s_srs):
                            iinfo.geom.Transform(ct)
                            ## fix self-intersection errors caused by reprojecting over 180
                            temp = iinfo.geom.Buffer(
                                0.1
                            )  # assumes a projected coordinate system with meters or feet as units
                            iinfo.geom = temp
                        if iinfo.geom.Intersects(t.geom):
                            if iinfo.scene_id in exclude_list:
                                logger.debug(
                                    "Scene in exclude list, excluding: %s",
                                    iinfo.srcfp)
                            elif not os.path.isfile(iinfo.srcfp):
                                logger.warning(
                                    "Scene path is invalid, excluding %s (path = %s)",
                                    iinfo.scene_id, iinfo.srcfp)
                            elif args.require_pan:
                                # Derive the panchromatic sibling name per
                                # sensor and require it to exist on disk.
                                srcfp = iinfo.srcfp
                                srcdir, mul_name = os.path.split(srcfp)
                                if iinfo.sensor in ["WV02", "WV03", "QB02"]:
                                    pan_name = mul_name.replace("-M", "-P")
                                elif iinfo.sensor == "GE01":
                                    if "_5V" in mul_name:
                                        # Legacy GE01 naming: glob for the pan
                                        # counterpart and require exactly one.
                                        pan_name_base = srcfp[:-24].replace(
                                            "M0", "P0")
                                        candidates = glob.glob(pan_name_base +
                                                               "*")
                                        candidates2 = [
                                            f for f in candidates
                                            if f.endswith(('.ntf', '.NTF',
                                                           '.tif', '.TIF'))
                                        ]
                                        if len(candidates2) == 0:
                                            pan_name = ''
                                        elif len(candidates2) == 1:
                                            pan_name = os.path.basename(
                                                candidates2[0])
                                        else:
                                            pan_name = ''
                                            logger.error(
                                                '%i panchromatic images match the multispectral image name '
                                                '%s', len(candidates2),
                                                mul_name)
                                    else:
                                        pan_name = mul_name.replace("-M", "-P")
                                elif iinfo.sensor == "IK01":
                                    # NOTE(review): each assignment below
                                    # overwrites the previous one, so only the
                                    # "bgrn" replacement takes effect; the
                                    # replaces were presumably meant to chain.
                                    pan_name = mul_name.replace("blu", "pan")
                                    pan_name = mul_name.replace("msi", "pan")
                                    pan_name = mul_name.replace("bgrn", "pan")
                                pan_srcfp = os.path.join(srcdir, pan_name)
                                if not os.path.isfile(pan_srcfp):
                                    logger.debug(
                                        "Image does not have a panchromatic component, excluding: %s",
                                        iinfo.srcfp)
                                else:
                                    logger.debug("Intersect %s, %s: %s",
                                                 iinfo.scene_id, iinfo.srcfp,
                                                 str(iinfo.geom))
                                    imginfo_list1.append(iinfo)
                            else:
                                logger.debug("Intersect %s, %s: %s",
                                             iinfo.scene_id, iinfo.srcfp,
                                             str(iinfo.geom))
                                imginfo_list1.append(iinfo)
                    feat = lyr.GetNextFeature()
                ds = None

                logger.info("Number of intersects in tile %s: %i", t.name,
                            len(imginfo_list1))
                if len(imginfo_list1) > 0:
                    #### Get mosaic parameters
                    logger.debug("Getting mosaic parameters")
                    params = mosaic.getMosaicParameters(imginfo_list1[0],
                                                        args)

                    #### Remove images that do not match ref
                    logger.debug("Setting image pattern filter")
                    imginfo_list2 = mosaic.filterMatchingImages(
                        imginfo_list1, params)
                    logger.info("Number of images matching filter: %i",
                                len(imginfo_list2))

                    if args.nosort is False:
                        #### Sort by quality
                        logger.debug("Sorting images by quality")
                        imginfo_list3 = []
                        for iinfo in imginfo_list2:
                            iinfo.getScore(params)
                            if iinfo.score > 0:
                                imginfo_list3.append(iinfo)
                        # sort so highest score is last
                        imginfo_list3.sort(key=lambda x: x.score)
                    else:
                        imginfo_list3 = list(imginfo_list2)

                    #### Overlay geoms and remove non-contributors
                    logger.debug(
                        "Overlaying images to determine contributors")
                    contribs = mosaic.determine_contributors(
                        imginfo_list3, t.geom, args.min_contribution_area)
                    logger.info("Number of contributing images: %i",
                                len(contribs))

                    if len(contribs) > 0:
                        if args.build_shp:
                            #######################################################
                            #### Create Shp of contributing footprints + scores
                            shp = os.path.join(
                                dstdir,
                                "{}_{}_imagery.shp".format(args.mosaic,
                                                           t.name))
                            logger.debug("Creating shapefile of geoms: %s",
                                         shp)
                            fields = [("IMAGENAME", ogr.OFTString, 100),
                                      ("SCORE", ogr.OFTReal, 0)]
                            OGR_DRIVER = "ESRI Shapefile"
                            ogrDriver = ogr.GetDriverByName(OGR_DRIVER)
                            if ogrDriver is None:
                                logger.debug("OGR: Driver %s is not available",
                                             OGR_DRIVER)
                                sys.exit(-1)
                            if os.path.isfile(shp):
                                ogrDriver.DeleteDataSource(shp)
                            vds = ogrDriver.CreateDataSource(shp)
                            if vds is None:
                                logger.debug("Could not create shp")
                                sys.exit(-1)
                            shpd, shpn = os.path.split(shp)
                            shpbn, shpe = os.path.splitext(shpn)
                            lyr = vds.CreateLayer(shpbn, t_srs, ogr.wkbPolygon)
                            if lyr is None:
                                logger.debug("ERROR: Failed to create layer: %s",
                                             shpbn)
                                sys.exit(-1)
                            for fld, fdef, flen in fields:
                                field_defn = ogr.FieldDefn(fld, fdef)
                                if fdef == ogr.OFTString:
                                    field_defn.SetWidth(flen)
                                if lyr.CreateField(field_defn) != 0:
                                    logger.debug(
                                        "ERROR: Failed to create field: %s",
                                        fld)
                            for iinfo, geom in contribs:
                                logger.debug("Image: %s", iinfo.srcfn)
                                feat = ogr.Feature(lyr.GetLayerDefn())
                                feat.SetField("IMAGENAME", iinfo.srcfn)
                                feat.SetField("SCORE", iinfo.score)
                                feat.SetGeometry(geom)
                                if lyr.CreateFeature(feat) != 0:
                                    logger.debug(
                                        "ERROR: Could not create feature for image %s",
                                        iinfo.srcfn)
                                else:
                                    logger.debug(
                                        "Created feature for image: %s",
                                        iinfo.srcfn)
                                feat.Destroy()

                        #### Write textfiles with original and ortho'd paths
                        if not os.path.isdir(dstdir):
                            os.makedirs(dstdir)
                        otxtpath = os.path.join(
                            dstdir,
                            "{}_{}_orig.txt".format(args.mosaic, t.name))
                        mtxtpath = os.path.join(
                            dstdir,
                            "{}_{}_ortho.txt".format(args.mosaic, t.name))
                        otxt = open(otxtpath, 'w')
                        mtxt = open(mtxtpath, 'w')
                        for iinfo, geom in contribs:
                            if not os.path.isfile(iinfo.srcfp):
                                logger.warning("Image does not exist: %s",
                                               iinfo.srcfp)
                            otxt.write("{}\n".format(iinfo.srcfp))
                            m_fn = "{0}_u08{1}{2}.tif".format(
                                os.path.splitext(iinfo.srcfn)[0],
                                args.stretch, t.epsg)
                            mtxt.write(
                                os.path.join(dstdir, 'ortho', t.name, m_fn) +
                                "\n")
                        # NOTE(review): mtxt is never closed here (only otxt
                        # is) - confirm whether that is intentional.
                        otxt.close()
# Script: reproject a UTM zone boundaries shapefile from WGS84 (EPSG:4326)
# to Web Mercator (EPSG:3857). Visible portion ends before the output layer
# is created.
import ogr
import osr
import os

# Shapefile driver, used for both reading the input and writing the output.
shp_driver = ogr.GetDriverByName('ESRI Shapefile')

# input SpatialReference: WGS84 geographic
input_srs = osr.SpatialReference()
input_srs.ImportFromEPSG(4326)

# output SpatialReference: Web Mercator
output_srs = osr.SpatialReference()
output_srs.ImportFromEPSG(3857)

# create the CoordinateTransformation used to reproject each geometry
coord_trans = osr.CoordinateTransformation(input_srs, output_srs)

# get the input layer
input_shp = shp_driver.Open(r'../geodata/UTM_Zone_Boundaries.shp')
in_shp_layer = input_shp.GetLayer()

# create the output layer
output_shp_file = r'../geodata/UTM_Zone_Boundaries_3857.shp'
# check if output file exists if yes delete it
if os.path.exists(output_shp_file):
    shp_driver.DeleteDataSource(output_shp_file)
# create a new Shapefile object
output_shp_dataset = shp_driver.CreateDataSource(output_shp_file)
# create a new layer in output Shapefile and define its geometry type
def get_box_info(nc_dataset):
    """
    (object)-> dict

    Return: the netCDF coverage box info as wgs84 crs

    Keys produced (when determinable): westlimit, eastlimit, northlimit,
    southlimit, units, projection - all values stringified. Returns an
    empty dict when the original box cannot be read or converted.
    """
    box_info = {}
    original_box_info = get_original_box_info(nc_dataset)
    if original_box_info:
        if original_box_info.get(
                'units', '').lower() == 'degree':  # geographic coor x, y
            box_info = original_box_info
            # check if the westlimit and eastlimit are in -180-180
            # NOTE(review): check_lon_limit is called twice with identical
            # arguments; presumably it is pure - confirm, else hoist one call.
            westlimit = float(box_info['westlimit'])
            eastlimit = float(box_info['eastlimit'])
            box_info['westlimit'] = check_lon_limit(westlimit, eastlimit)[0]
            box_info['eastlimit'] = check_lon_limit(westlimit, eastlimit)[1]
        elif original_box_info.get('projection', ''):  # projection coor x, y
            projection_import_string_dict = \
                get_nc_grid_mapping_projection_import_string_dict(nc_dataset)
            if projection_import_string_dict.get('type') == 'Proj4 String':
                # Reproject the two corners via pyproj; on any failure the
                # box is left empty (best-effort behavior).
                try:
                    ori_proj = Proj(projection_import_string_dict['text'])
                    wgs84_proj = Proj(init='epsg:4326')
                    box_info['westlimit'], box_info['northlimit'] = transform(
                        ori_proj, wgs84_proj,
                        original_box_info['westlimit'],
                        original_box_info['northlimit'])
                    box_info['eastlimit'], box_info['southlimit'] = transform(
                        ori_proj, wgs84_proj,
                        original_box_info['eastlimit'],
                        original_box_info['southlimit'])
                except Exception:
                    pass
            elif projection_import_string_dict.get('type') == 'WKT String':
                # Reproject the two corners via GDAL/osr; same best-effort
                # behavior on failure.
                try:
                    # create wgs84 geographic coordinate system
                    wgs84_cs = osr.SpatialReference()
                    wgs84_cs.ImportFromEPSG(4326)
                    original_cs = osr.SpatialReference()
                    original_cs.ImportFromWkt(
                        projection_import_string_dict.get('text'))
                    crs_transform = osr.CoordinateTransformation(
                        original_cs, wgs84_cs)
                    box_info['westlimit'], box_info[
                        'northlimit'] = crs_transform.TransformPoint(
                            float(original_box_info['westlimit']),
                            float(original_box_info['northlimit']))[:2]
                    box_info['eastlimit'], box_info[
                        'southlimit'] = crs_transform.TransformPoint(
                            float(original_box_info['eastlimit']),
                            float(original_box_info['southlimit']))[:2]
                except Exception:
                    pass

    if box_info:
        # change the value as string
        for name in box_info.keys():
            box_info[name] = str(box_info[name])
        box_info['units'] = 'Decimal degrees'
        box_info['projection'] = 'WGS 84 EPSG:4326'

    return box_info
def shp_reproject(srcName, tgtName, epsg=26910):
    '''
    shp_reproject() - Project a shapefile to another shapefile in <spatRef>
    coordinate system.

    Parameters
    ----------
    srcName : str
        input shapefile name

    tgtName : str
        output shapefile name

    epsg : int, default=26910 (NAD 83 UTM 10, CA)
        EPSG projection

    Returns
    -------
    nothing
    '''
    import os
    import shutil  # FIX: shutil is used below but was missing from the local imports
    import osr
    import ogr

    # Set target spatial reference
    tgt_spatRef = osr.SpatialReference()
    tgt_spatRef.ImportFromEPSG(epsg)

    # Source shapefile
    driver = ogr.GetDriverByName('ESRI Shapefile')
    src = driver.Open(srcName, 0)
    srcLyr = src.GetLayer()
    src_spatRef = srcLyr.GetSpatialRef()  # Source spatial reference

    # Target shapefile - delete if it's already there.
    if os.path.exists(tgtName):
        driver.DeleteDataSource(tgtName)
    tgt = driver.CreateDataSource(tgtName)
    lyrName = os.path.splitext(tgtName)[0]
    # NOTE(review): geometry type is hard-coded to wkbPoint; confirm all
    # inputs are point shapefiles.
    tgtLyr = tgt.CreateLayer(lyrName, geom_type=ogr.wkbPoint)

    # Layer definition
    featDef = srcLyr.GetLayerDefn()

    # Spatial Transform
    trans = osr.CoordinateTransformation(src_spatRef, tgt_spatRef)

    # Reproject and copy features
    srcFeat = srcLyr.GetNextFeature()
    while srcFeat:
        geom = srcFeat.GetGeometryRef()
        geom.Transform(trans)
        feature = ogr.Feature(featDef)
        feature.SetGeometry(geom)
        tgtLyr.CreateFeature(feature)
        feature.Destroy()
        srcFeat.Destroy()
        srcFeat = srcLyr.GetNextFeature()
    src.Destroy()
    tgt.Destroy()

    # Create the prj file
    tgt_spatRef.MorphToESRI()  # Convert geometry to ESRI WKT format
    prj = open(lyrName + '.prj', 'w')
    prj.write(tgt_spatRef.ExportToWkt())
    prj.close()

    # Just copy dbf contents over rather than rebuild the dbf using the
    # ogr API since we're not changing anything.
    srcDbf = os.path.splitext(srcName)[0] + '.dbf'
    tgtDbf = lyrName + '.dbf'
    shutil.copyfile(srcDbf, tgtDbf)
    return
def gen_zonalstats(zones_json, raster):
    """
    Generator function that yields the statistics of a raster dataset
    within each polygon (zone) of a vector dataset.

    :param zones_json: Polygons in GeoJSON format (dict or JSON string)
    :param raster: Raster dataset (path or gdal.Dataset, via get_dataset)
    :return: Polygons with additional properties for calculated raster stats
        (count, sum, mean, median, min, max, stddev; None when no data).
    """
    # Open data
    raster = get_dataset(raster)
    if type(zones_json) is str:
        shp = ogr.Open(zones_json)
        zones_json = json.loads(zones_json)
    else:
        shp = ogr.Open(json.dumps(zones_json))
    lyr = shp.GetLayer()

    # Get raster georeference info
    transform = raster.GetGeoTransform()
    xOrigin = transform[0]
    yOrigin = transform[3]
    pixelWidth = transform[1]
    pixelHeight = transform[5]

    # Reproject vector geometry to same projection as raster
    sourceSR = lyr.GetSpatialRef()
    targetSR = osr.SpatialReference()
    targetSR.ImportFromWkt(raster.GetProjectionRef())
    coordTrans = osr.CoordinateTransformation(sourceSR, targetSR)

    # TODO: Use a multiprocessing pool to process features more quickly
    for feature in zones_json['features']:
        geom = ogr.CreateGeometryFromJson(json.dumps(feature['geometry']))
        if sourceSR.ExportToWkt() != targetSR.ExportToWkt():
            geom.Transform(coordTrans)

        # Get extent of feat by collecting all finite vertex coordinates.
        if geom.GetGeometryName() == 'MULTIPOLYGON':
            count = 0
            pointsX = []
            pointsY = []
            for polygon in geom:
                ring = geom.GetGeometryRef(count).GetGeometryRef(0)
                numpoints = ring.GetPointCount()
                for p in range(numpoints):
                    lon, lat, z = ring.GetPoint(p)
                    if abs(lon) != float('inf'):
                        pointsX.append(lon)
                    if abs(lat) != float('inf'):
                        pointsY.append(lat)
                count += 1
        elif geom.GetGeometryName() == 'POLYGON':
            ring = geom.GetGeometryRef(0)
            numpoints = ring.GetPointCount()
            pointsX = []
            pointsY = []
            for p in range(numpoints):
                lon, lat, z = ring.GetPoint(p)
                if abs(lon) != float('inf'):
                    pointsX.append(lon)
                if abs(lat) != float('inf'):
                    pointsY.append(lat)
        else:
            raise GaiaException(
                "ERROR: Geometry needs to be either Polygon or Multipolygon")
        xmin = min(pointsX)
        xmax = max(pointsX)
        ymin = min(pointsY)
        ymax = max(pointsY)

        # Specify offset and rows and columns to read.
        # NOTE(review): yoff/ycount divide by pixelWidth, not |pixelHeight| -
        # correct only for square pixels; confirm inputs.
        xoff = int((xmin - xOrigin) / pixelWidth)
        yoff = int((yOrigin - ymax) / pixelWidth)
        xcount = int((xmax - xmin) / pixelWidth) + 1
        ycount = int((ymax - ymin) / pixelWidth) + 1

        # Create memory target raster covering the feature's bounding box.
        target_ds = gdal.GetDriverByName('MEM').Create('', xcount, ycount, 1,
                                                       gdal.GDT_Byte)
        target_ds.SetGeoTransform((
            xmin,
            pixelWidth,
            0,
            ymax,
            0,
            pixelHeight,
        ))

        # Create for target raster the same projection as for the value raster
        raster_srs = osr.SpatialReference()
        raster_srs.ImportFromWkt(raster.GetProjectionRef())
        target_ds.SetProjection(raster_srs.ExportToWkt())

        # Rasterize zone polygon to raster
        gdal.RasterizeLayer(target_ds, [1], lyr, burn_values=[1])

        # Read raster as arrays.
        # FIX: numpy.float was deprecated/removed (NumPy 1.20/1.24); the
        # builtin float is the documented equivalent.
        banddataraster = raster.GetRasterBand(1)
        try:
            dataraster = banddataraster.ReadAsArray(
                xoff, yoff, xcount, ycount).astype(float)
        except AttributeError:
            # Nothing within bounds, move on to next polygon
            properties = feature[u'properties']
            for p in [
                    'count', 'sum', 'mean', 'median', 'min', 'max', 'stddev'
            ]:
                properties[p] = None
            yield feature
        else:
            # Get no data value of array
            # NOTE(review): a falsy nodata value (e.g. 0) is not replaced
            # with NaN here - confirm whether that is intended.
            noDataValue = banddataraster.GetNoDataValue()
            if noDataValue:
                # Updata no data value in array with new value
                dataraster[dataraster == noDataValue] = numpy.nan

            bandmask = target_ds.GetRasterBand(1)
            datamask = bandmask.ReadAsArray(0, 0, xcount,
                                            ycount).astype(float)

            # Mask zone of raster
            zoneraster = numpy.ma.masked_array(dataraster,
                                               numpy.logical_not(datamask))

            properties = feature['properties']
            properties['count'] = zoneraster.count()
            properties['sum'] = numpy.nansum(zoneraster)
            if type(properties['sum']) == MaskedConstant:
                # No non-null values for raster data in polygon, skip
                for p in ['sum', 'mean', 'median', 'min', 'max', 'stddev']:
                    properties[p] = None
            else:
                properties['mean'] = numpy.nanmean(zoneraster)
                properties['min'] = numpy.nanmin(zoneraster)
                properties['max'] = numpy.nanmax(zoneraster)
                properties['stddev'] = numpy.nanstd(zoneraster)
                median = numpy.ma.median(zoneraster)
                if hasattr(median, 'data') and not numpy.isnan(median.data):
                    properties['median'] = median.data.item()
            yield (feature)
def main():
    """Extract OSM features from a .sqlite db and reproject them to the SRS
    of a reference shapefile.

    Command-line arguments (sys.argv):
        1: path to the input OSM .sqlite database
        2: country name (used to build output directories)
        3: output base directory
        4: geometry kind: "line" or "polygon"
        5: shapefile providing the target spatial reference
        6: output format: ".shp" or ".sqlite"

    Raises:
        ValueError: if argument 4 or 6 is not one of the allowed values.
    """
    # Each argv entry is truncated at the first blank, preserving the
    # original call convention.
    inputPath = str(sys.argv[1]).split(" ")[0]     # osm db .sqlite
    country = str(sys.argv[2]).split(" ")[0]       # country to be computed
    outD = str(sys.argv[3]).split(" ")[0]          # output directory
    geomKind = str(sys.argv[4]).split(" ")[0]      # "line" or "polygon" (renamed from 'type': shadowed the builtin)
    refShp = str(sys.argv[5]).split(" ")[0]        # shapefile with srs reference
    outputFormat = str(sys.argv[6]).split(" ")[0]  # ".shp" or ".sqlite"

    # Fix: validate the enumerated arguments up front.  The original code
    # silently fell through and crashed later with a NameError on
    # 'reprojectedPath' / 'geoType' when these were misspelled.
    if outputFormat not in (".shp", ".sqlite"):
        raise ValueError("output format must be '.shp' or '.sqlite', got %r" % outputFormat)
    if geomKind == "line":
        geoType = ogr.wkbLineString
        m = "lines"
        p = "LINESTRING"
    elif geomKind == "polygon":
        geoType = ogr.wkbPolygon
        m = "multipolygons"
        p = "POLYGON"
    else:
        raise ValueError("type must be 'line' or 'polygon', got %r" % geomKind)

    base = os.path.basename(inputPath)
    fn = os.path.splitext(base)[0]
    outputDir = outD + "/" + country + "/extracted/"
    outputPath = outputDir + fn + ".shp"
    reprojectedDir = outD + "/" + country + "/reprojected/"
    reprojectedPath = reprojectedDir + fn + outputFormat
    if not os.path.exists(outputDir):
        os.makedirs(outputDir)
    if not os.path.exists(reprojectedDir):
        os.makedirs(reprojectedDir)

    # Extract the requested layer from the OSM db into a plain shapefile.
    f = "ESRI Shapefile"
    subprocess.call([
        "ogr2ogr", "-f", f, outputPath, inputPath,
        "-dsco", "SPATIALITE=NO",
        "-sql", "SELECT * FROM " + m,
        "-nlt", p
    ])

    ##### reproject shape to target srs
    driver = ogr.GetDriverByName("ESRI Shapefile")
    dataset = driver.Open(refShp)
    inLayer = dataset.GetLayer()
    targetRef = inLayer.GetSpatialRef()
    dataset1 = driver.Open(outputPath)
    print(outputPath)
    print(dataset1)
    inLayer1 = dataset1.GetLayer()
    sourceRef = inLayer1.GetSpatialRef()
    coordTrans = osr.CoordinateTransformation(sourceRef, targetRef)

    # Create the output data source, removing any stale copy first.
    outputShapefile = reprojectedPath
    if outputFormat == ".shp":
        driver2 = ogr.GetDriverByName("ESRI Shapefile")
    else:
        driver2 = ogr.GetDriverByName("SQLite")
    if os.path.exists(outputShapefile):
        driver2.DeleteDataSource(outputShapefile)
    outDataSet = driver2.CreateDataSource(outputShapefile)
    outLayer = outDataSet.CreateLayer("layer", targetRef, geom_type=geoType)

    # Copy the field schema from the extracted layer.
    inLayerDefn = inLayer1.GetLayerDefn()
    for i in range(0, inLayerDefn.GetFieldCount()):
        fieldDefn = inLayerDefn.GetFieldDefn(i)
        outLayer.CreateField(fieldDefn)
    # output layer's feature definition
    outLayerDefn = outLayer.GetLayerDefn()

    # Reproject every feature and copy its attributes.
    inFeature = inLayer1.GetNextFeature()
    while inFeature:
        geom = inFeature.GetGeometryRef()
        geom.Transform(coordTrans)
        outFeature = ogr.Feature(outLayerDefn)
        outFeature.SetGeometry(geom)
        for i in range(0, outLayerDefn.GetFieldCount()):
            outFeature.SetField(
                outLayerDefn.GetFieldDefn(i).GetNameRef(),
                inFeature.GetField(i))
        outLayer.CreateFeature(outFeature)
        # dereference the feature and get the next input feature
        outFeature = None
        inFeature = inLayer1.GetNextFeature()

    # Save and close the data sources.
    # Fix: the original never released dataset1 (the extracted shapefile).
    dataset = None
    dataset1 = None
    outDataSet = None
def __init__(self, file_path, overwrite=True,lyr_name='bps_points',qsettings=None,epsg_in=CONF_INPUT_EPSG,epsg_out=CONF_OUTPUT_EPSG):
    """
    An object with the necessary attributes and methods to turn a directory of
    tagged images into a shapefile.

    Attributes:
        file_path: The path to write out the shapefile to. This must be a valid
            file path that ends with .shp. String type.
        overwrite: A boolean indicating whether the file_path should be
            overwritten if it already exists.
        lyr_name: A string that will be used to name the feature layer in the
            shapefile.
        qsettings: A PyQt4 QSettings object that will be inspected to get EPSG
            values for input and output. If supplied, the qsettings EPSG values
            will be used and the those supplied directly will be ignored.
        epsg_in: An int value representing the input EPSG value. This will be
            ignored if specified in qsettings.
        epsg_out: An int value representing the output EPSG value. This will be
            ignored if specified in qsettings.
    """
    self.overwrite = overwrite
    # __validate_fp presumably enforces the ".shp" extension / overwrite
    # policy described above -- defined elsewhere in this class.
    self.file_path = self.__validate_fp(file_path)
    self.settings = qsettings
    #-----Set up the shapefile-------------------------
    self.spatialRefIn = osr.SpatialReference()
    if self.settings:
        # QSettings values override the epsg_in/epsg_out arguments.
        # NOTE: .toInt() is the Qt4 QVariant API (returns (int, ok));
        # this code path is PyQt4-only.
        epsg_in, ok_in = qsettings.value("inputEPSG",CONF_INPUT_EPSG).toInt()
        epsg_out, ok_out = qsettings.value("outputEPSG",CONF_OUTPUT_EPSG).toInt()
    self.spatialRefIn.ImportFromEPSG(epsg_in)
    # if epsg_out is None, we want to output in the epsg_in spatial reference
    if epsg_out:
        self.spatialRefOut = osr.SpatialReference()
        self.spatialRefOut.ImportFromEPSG(epsg_out)
        self.sr_trans = osr.CoordinateTransformation(self.spatialRefIn,self.spatialRefOut)
    else:
        self.spatialRefOut = self.spatialRefIn
        self.sr_trans = None  # no reprojection needed
    # Create the output shapefile and its point layer in the output SRS.
    self.driver = ogr.GetDriverByName('ESRI Shapefile')
    self.ds = self.driver.CreateDataSource(self.file_path)
    self.lyr = self.ds.CreateLayer(lyr_name, self.spatialRefOut, ogr.wkbPoint)
    # Mapping of attribute field name -> OGR field type for each point.
    self.f_dict = {
        'date_loc' : ogr.OFTString,
        'date_utc' : ogr.OFTString,
        'time_loc' : ogr.OFTString,
        'time_utc' : ogr.OFTString,
        'img_path' : ogr.OFTString,
        'direction' : ogr.OFTReal,
        'depth' : ogr.OFTReal,
        'temp' : ogr.OFTReal,
        'habitat' : ogr.OFTString,
        'hab_color' : ogr.OFTString,
        'hab_num' : ogr.OFTInteger,
        'subst' : ogr.OFTString,
    }
    for k,v in self.f_dict.iteritems():  # Python 2 dict iteration
        new_field = ogr.FieldDefn(k, v)
        if k == 'img_path':
            # image paths can be long; widen beyond the default field width
            new_field.SetWidth(180)
        self.lyr.CreateField(new_field)
    self.lyrDefn = self.lyr.GetLayerDefn()
    # running index of features written so far (used by other methods)
    self.feat_index = 0
drv2 = ogr.GetDriverByName("ESRI Shapefile") dst_ds = drv2.CreateDataSource(shp_path) srs = osr.SpatialReference(wkt=img2.GetProjection()) dst_layername = "Shape" dst_layer = dst_ds.CreateLayer(dst_layername, srs=srs) raster_field = ogr.FieldDefn('id', type_mapping[srcband.DataType]) dst_layer.CreateField(raster_field) gdal.Polygonize(srcband, srcband, dst_layer, 0, [], callback=None) img2.FlushCache() del img2, srcband filt = ogr.CreateGeometryFromWkt(dam_wkt) WGS84 = osr.SpatialReference() WGS84.ImportFromEPSG(4326) tf = osr.CoordinateTransformation(WGS84, srs) filt.Transform(tf) asum = [] for i in dst_layer: area = i.GetGeometryRef().GetArea() if i.GetGeometryRef().Intersect(filt) and area > 5000: asum.append(area) else: dst_layer.DeleteFeature(i.GetFID()) print("Water Area of", date, ":", sum(asum)) del dst_ds
def consolidate(inputfile):
    # Combine all per-polygon subset shapefiles into one "<base>-traced.shp"
    # macroset: each polygon is classified by the average color of the raster
    # pixels it covers (nearest entry in 'basecolors') and annotated with
    # dot/cross feature counts detected by OpenCV.
    # Relies on module-level globals: dir_base_name, base_name, path,
    # basecolors, average_color, cv_feature_detect. Python 2 code.
    # 4 create a new data source and layer
    fn = dir_base_name + '-traced.shp'
    # 2 get the shapefile driver
    driver = ogr.GetDriverByName('ESRI Shapefile')
    # 3 open the input data source and get the layer
    shapefile = dir_base_name + '.shp'
    inDS = driver.Open(shapefile, 0)  #shows cover at given points
    if inDS is None:
        print 'Could not open shapefile'
        sys.exit(1)
    inLayer = inDS.GetLayer()
    # 5 get the FieldDefn for the 'DN' field from the first input feature
    feature = inLayer.GetFeature(0)
    idFieldDefn = feature.GetFieldDefnRef('DN')
    # recreate the output data source from scratch
    if os.path.exists(fn):
        driver.DeleteDataSource(fn)
    outDS = driver.CreateDataSource(fn)
    if outDS is None:
        print 'Could not create final shapefile'
        sys.exit(1)
    outLayer = outDS.CreateLayer(base_name, geom_type=ogr.wkbPolygon)
    # create new field in the output shapefile (copied DN definition)
    outLayer.CreateField(idFieldDefn)
    # 6 get the FeatureDefn for the output layer
    # NOTE: captured before the extra fields below are added; features built
    # from it are still accepted by CreateFeature.
    featureDefn = outLayer.GetLayerDefn()
    # new field definitions for this shapefile
    # color definition: index into 'basecolors'
    colorDefn = ogr.FieldDefn("Color", ogr.OFTInteger)
    colorDefn.SetWidth(2)
    colorDefn.SetPrecision(0)
    outLayer.CreateField(colorDefn)
    # dot count definition
    dotCountDefn = ogr.FieldDefn("DotCount", ogr.OFTInteger)
    dotCountDefn.SetWidth(2)
    dotCountDefn.SetPrecision(0)
    outLayer.CreateField(dotCountDefn)
    # dot type definition (outline vs filled)
    dotTypeDefn = ogr.FieldDefn("DotType", ogr.OFTInteger)
    dotTypeDefn.SetWidth(1)
    dotTypeDefn.SetPrecision(0)
    outLayer.CreateField(dotTypeDefn)
    # cross count definition
    crossCountDefn = ogr.FieldDefn("CrossCount", ogr.OFTInteger)
    crossCountDefn.SetWidth(2)
    crossCountDefn.SetPrecision(0)
    outLayer.CreateField(crossCountDefn)
    # cross data definition (serialized detection details)
    crossDataDefn = ogr.FieldDefn("CrossData", ogr.OFTString)
    crossDataDefn.SetWidth(255)
    outLayer.CreateField(crossDataDefn)
    # add lat/lon as OFTReal attributes
    outLayer.CreateField(ogr.FieldDefn("CentroidY", ogr.OFTReal))
    outLayer.CreateField(ogr.FieldDefn("CentroidX", ogr.OFTReal))
    # First pass: for every "-polygon" subset shapefile, cut the matching
    # raster area out of 'inputfile', classify its average color, and detect
    # dot/cross features. Collect [polygonfile, colorindex, circle_data].
    polygonfiles = []
    for files in os.listdir(path):
        if files.endswith(".shp") and files.find('-polygon') != -1:
            polygonfile = path + "/" + files
            # apply a projection so gdalwarp doesnt complain
            polygonfilename = files[:files.find(".shp")]
            os.system("cp " + dir_base_name + ".prj " + path + "/" + polygonfilename + ".prj")
            extractedfile = path + "/" + polygonfilename + "-extracted.tif"
            # extract bitmap from original, clipped to this polygon
            command = "gdalwarp -q -t_srs EPSG:3785 -cutline " + polygonfile + " -crop_to_cutline -of GTiff " + inputfile + " " + extractedfile
            logging.debug(command)
            # print command
            os.system(command)
            # calculate color
            # shrink to 1x1 and find value
            # logging.debug( string.join(["convert", "-quiet", os.path.abspath(extractedfile), "-resize", "1x1","txt:-"]) )
            # pixelvalue = subprocess.Popen(["convert", "-quiet", os.path.abspath(extractedfile), "-resize", "1x1","txt:-"], stdout=subprocess.PIPE).communicate()[0]
            # pattern = re.compile(r"0,0: \(([\s0-9]*),([\s0-9]*),([\s0-9]*).*")
            # values = pattern.findall(pixelvalue)
            extractedpath = os.path.abspath(extractedfile)
            # gdalwarp may have produced nothing for degenerate polygons
            if os.path.exists(extractedpath) == False:
                continue
            values = average_color(extractedpath)
            if len(values) > 0:
                red = int(values[0])
                green = int(values[1])
                blue = int(values[2])
                # nearest-neighbor match in RGB space (squared distance)
                nearest = 100000
                nearestcolor = []
                nearestcolorindex = -1
                for i, color in enumerate(basecolors):
                    dred = (color[0] - red) * (color[0] - red)
                    dgreen = (color[1] - green) * (color[1] - green)
                    dblue = (color[2] - blue) * (color[2] - blue)
                    dist = dred + dgreen + dblue
                    if dist < nearest:
                        nearest = dist
                        nearestcolor = color
                        nearestcolorindex = i
                # only add if NOT paper (basecolors[0] is the paper color)
                if nearestcolor != basecolors[0]:
                    # check for dots / crosses in the clipped image
                    circle_data = cv_feature_detect(extractedfile)
                    # add to array
                    polygonfiles.append(
                        [polygonfile, nearestcolorindex, circle_data])
                else:
                    logging.debug("Ignored (paper color): " + polygonfilename + "\n")
            else:
                logging.debug("Ignored (regex match error): " + polygonfilename + "\n")
    # Second pass: copy every feature of every kept subset shapefile into the
    # consolidated output layer, attaching the classification fields and the
    # WGS84 centroid.
    for files in polygonfiles:
        # 3 open the input data source and get the layer
        tempfile = files[0]  #dir_base_name + '-tmp-' + str(currentsubset) + '-traced.shp'
        inDS = driver.Open(tempfile, 0)  #shows cover at given points
        if inDS is None:
            print 'Could not open temporary shapefile'
            break
        inLayer = inDS.GetLayer()
        # 7 loop through the input features
        inFeature = inLayer.GetNextFeature()
        while inFeature:
            # create a new feature
            outFeature = ogr.Feature(featureDefn)  #using featureDefn created in step 6
            # set the geometry
            geom = inFeature.GetGeometryRef()
            outFeature.SetGeometry(geom)  #move it to the new feature
            DN = inFeature.GetField('DN')
            outFeature.SetField('DN', DN)  #move it to the new feature
            outFeature.SetField('Color', int(files[1]))
            outFeature.SetField('DotCount', int(files[2]["count"]))
            outFeature.SetField('DotType', int(files[2]["is_outline"]))
            outFeature.SetField('CrossCount', int(files[2]["cross_count"]))
            outFeature.SetField('CrossData', str(files[2]["cross_data"]))
            # NOTE(review): this transformation is loop invariant (same
            # hardcoded 3785 -> 4326 every time) and could be hoisted out of
            # the per-feature loop.
            source_srs = osr.SpatialReference()
            source_srs.ImportFromEPSG(3785)  # NOTE: notice this is hardcoded
            target_srs = osr.SpatialReference()
            target_srs.ImportFromEPSG(4326)  # NOTE: notice this is hardcoded
            transform = osr.CoordinateTransformation(source_srs, target_srs)
            # store the centroid in WGS84 lat/lon
            centroid = geom.Centroid()
            centroid.Transform(transform)
            outFeature.SetField('CentroidY', centroid.GetY())
            outFeature.SetField('CentroidX', centroid.GetX())
            # outFeature.SetField('circle_count', files[2]["circle_count"])
            # outFeature.SetField('circle_type', files[2]["is_outline"])
            # add the feature to the output layer
            outLayer.CreateFeature(outFeature)
            # destroy the output feature
            outFeature.Destroy()
            # destroy the input feature and get a new one
            inFeature.Destroy()
            inFeature = inLayer.GetNextFeature()
        # close this subset's data source
        inDS.Destroy()
    outDS.Destroy()  #flush out the last changes here
    print ""
    print "Applying projection file to result..."
    print "-------------------------------------"
    os.system("cp " + dir_base_name + ".prj " + dir_base_name + "-traced.prj")
baer_upper_left_y = baer_geotransform[3] baer_ns_rotation = baer_geotransform[4] baer_pixel_height = baer_geotransform[5] baer_xsize = baer_ds.RasterXSize baer_ysize = baer_ds.RasterYSize # Sometimes BAER data is in UTM projection instead of Albers projection # Project BAER data into Albers, if necessary, using Nearest Neighbor algorithm print baer_ds.GetProjection() raise SystemExit if "utm" in baer_file_path: print 'utm' # Define transformation from input projection to output projection utm_proj = osr.SpatialReference(baer_ds.GetProjection()) alb_proj = osr.SpatialReference(PROJ) transformation = osr.CoordinateTransformation( utm_proj, alb_proj) # Calculate geotransform of projected raster # top left corner (ulx, uly, ulz) = transformation.TransformPoint( baer_upper_left_x, baer_upper_left_y) # bottom right corner (lrx, lry, lrz) = transformation.TransformPoint( baer_upper_left_x + baer_pixel_width * baer_xsize, baer_upper_left_y + baer_pixel_height * baer_ysize) # bottom left corner (llx, lly, llz) = transformation.TransformPoint( baer_upper_left_x, baer_upper_left_y + baer_pixel_height * baer_ysize) # top right corner (urx, ury, urz) = transformation.TransformPoint(
##outfile=open("%s/wkt.txt"%(os.getcwd()),'w') ##outfile.write(k) ##outfile.close() #the following takes the LANDFIRE projection in well known text format src_ref = osr.SpatialReference() LF_prj = """PROJCS["NAD_1983_Albers",GEOGCS["GCS_North_American_1983",DATUM["North_American_Datum_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["longitude_of_center",-96.0],PARAMETER["Standard_Parallel_1",29.5],PARAMETER["Standard_Parallel_2",45.5],PARAMETER["latitude_of_center",23.0],UNIT["Meter",1.0]]""" src_ref.ImportFromWkt(LF_prj) #sink spatial ref sink_ref = osr.SpatialReference() sink_ref.SetWellKnownGeogCS( "WGS84") #this is the GoogleEarth Native projection #coordinate transformation project = osr.CoordinateTransformation(src_ref, sink_ref) infile = open("%s/z%s_ge_inputs.csv" % (working_dir, Zone), 'r') header = infile.readline() data = infile.readlines() infile.close() data_LUT = dict() for line in data: photo_names = [] dom_spps = [] #list of triads line = line.split(",") Master_ID = int(line[0]) RegID = line[1]
def zonal_stats(feat, input_zone_polygon, input_value_raster):
    """Rasterize one polygon feature and histogram the raster class values
    inside it.

    Args:
        feat: ogr.Feature whose geometry defines the zone.
        input_zone_polygon: path to the vector data set containing feat.
        input_value_raster: path to the categorical value raster.

    Returns:
        numpy int array of length 223 with per-class pixel counts inside the
        zone, zero-padded when fewer classes occur (223 is the expected
        number of raster classes).
    """
    # Open data
    raster = gdal.Open(input_value_raster)
    shp = ogr.Open(input_zone_polygon)
    lyr = shp.GetLayer()

    # Get raster georeference info
    transform = raster.GetGeoTransform()
    xOrigin = transform[0]
    yOrigin = transform[3]
    pixelWidth = transform[1]
    pixelHeight = transform[5]

    # Reproject the feature geometry into the raster's SRS.
    sourceSR = lyr.GetSpatialRef()
    targetSR = osr.SpatialReference()
    targetSR.ImportFromWkt(raster.GetProjectionRef())
    coordTrans = osr.CoordinateTransformation(sourceSR, targetSR)
    # restrict the layer to just this feature for rasterization below
    lyr.SetAttributeFilter('FID = ' + str(feat.GetFID()))
    geom = feat.GetGeometryRef()
    geom.Transform(coordTrans)

    # Collect the vertex coordinates to derive the feature's bounding box.
    geom = feat.GetGeometryRef()
    if geom.GetGeometryName() == 'MULTIPOLYGON':
        count = 0
        pointsX = []
        pointsY = []
        for polygon in geom:
            geomInner = geom.GetGeometryRef(count)
            ring = geomInner.GetGeometryRef(0)
            numpoints = ring.GetPointCount()
            for p in range(numpoints):
                lon, lat, z = ring.GetPoint(p)
                pointsX.append(lon)
                pointsY.append(lat)
            count += 1
    elif geom.GetGeometryName() == 'POLYGON':
        ring = geom.GetGeometryRef(0)
        numpoints = ring.GetPointCount()
        pointsX = []
        pointsY = []
        for p in range(numpoints):
            lon, lat, z = ring.GetPoint(p)
            pointsX.append(lon)
            pointsY.append(lat)
    else:
        sys.exit("ERROR: Geometry needs to be either Polygon or Multipolygon")
    xmin = min(pointsX)
    xmax = max(pointsX)
    ymin = min(pointsY)
    ymax = max(pointsY)

    # Pixel offset and window size of the bounding box.
    # NOTE(review): yoff/ycount divide by pixelWidth, which assumes square
    # pixels -- confirm whether rasters with pixelWidth != |pixelHeight|
    # can occur here.
    xoff = int((xmin - xOrigin) / pixelWidth)
    yoff = int((yOrigin - ymax) / pixelWidth)
    xcount = int((xmax - xmin) / pixelWidth) + 1
    ycount = int((ymax - ymin) / pixelWidth) + 1

    # Create target raster for the rasterized zone mask.
    # Fix: the band count was previously omitted, so gdal.GDT_Byte (== 1)
    # was silently consumed as the band count and the data type defaulted;
    # pass both explicitly.
    target_ds = gdal.GetDriverByName('GTiff').Create('test.tif', xcount, ycount, 1, gdal.GDT_Byte)
    target_ds.SetGeoTransform((
        xmin, pixelWidth, 0,
        ymax, 0, pixelHeight,
    ))
    # Give the target raster the same projection as the value raster.
    raster_srs = osr.SpatialReference()
    raster_srs.ImportFromWkt(raster.GetProjectionRef())
    target_ds.SetProjection(raster_srs.ExportToWkt())
    # Rasterize zone polygon to raster (burn 1 inside the polygon).
    gdal.RasterizeLayer(target_ds, [1], lyr, burn_values=[1])

    # Read the value raster window and the mask as arrays.
    # Fix: numpy.int / numpy.float were removed in NumPy 1.24; the builtin
    # types produce the same dtypes (platform int, float64).
    banddataraster = raster.GetRasterBand(1)
    dataraster = banddataraster.ReadAsArray(xoff, yoff, xcount, ycount).astype(int)
    bandmask = target_ds.GetRasterBand(1)
    datamask = bandmask.ReadAsArray(0, 0, xcount, ycount).astype(float)

    # Mask everything outside the zone polygon.
    zoneraster = numpy.ma.masked_array(dataraster, numpy.logical_not(datamask))

    # Histogram the class values inside the zone, padded to 223 bins.
    u, indices = numpy.unique(zoneraster, return_inverse=True)
    count = numpy.bincount(zoneraster.ravel())
    if len(count) != 223:
        zer = numpy.zeros(223 - len(count), dtype=int)
        new = numpy.append(count, zer)
        return new
    else:
        return count
def addGPXToModel(pr, npim, dem, importedGPX, gpxPathHeight, gpxPixelsBetweenPoints, gpxPathThickness, trlat, trlon, bllat, bllon):
    """
    Add 1 or more GPX tracks to the terrain model.

    Args:
        pr (function): reference to the logging function
        npim (2d numpy array): elevation data for points on the 3D terrain map
        dem (GDAL raster dataset): supplies the projection used by the
            elevation data
        importedGPX (list): file paths for GPX tracks
        gpxPathHeight (int): height offset, in meters, from the terrain
            elevation to denote a GPX track. Negative numbers are ok.
        gpxPixelsBetweenPoints (int): minimum pixel distance between plotted
            points; a higher number draws fewer lines, making paths look
            cleaner at the expense of precision
        gpxPathThickness (int): stacks parallel lines on either side of the
            primary line to create thickness
        trlat (float): top right latitude of the terrain map
        trlon (float): top right longitude of the terrain map
        bllat (float): bottom left latitude of the terrain map
        bllon (float): bottom left longitude of the terrain map

    Returns:
        a modified npim array with elevations adjusted so the GPX path(s)
        are recognizable on the terrain model
    """
    import xml.etree.ElementTree as ET

    # If PROJ_DIR is configured, point PROJ_LIB at it so osr can find proj.db.
    # Without it, source.ImportFromEPSG(4326) fails (only printed to stderr!)
    # and transform.TransformPoint() later dies with a confusing
    # NotImplementedError about overloaded C++ signatures. If that happens,
    # locate proj.db (e.g. anaconda3\Lib\site-packages\osgeo\data\proj) and
    # set it in common/config.py as PROJ_DIR.
    from touchterrain.common import config  # non-server config settings
    if config.PROJ_DIR != None:  # we got an override setting!
        import os
        os.environ['PROJ_LIB'] = config.PROJ_DIR
        print("PROJ_LIB OSGEO projection folder was set to", os.environ['PROJ_LIB'])

    # Later versions of osr may be bundled into osgeo so check there as well.
    try:
        import osr
    except ImportError as err:
        from osgeo import osr

    import time
    import math

    gpxStartTime = time.time()
    pathedPoints = {}

    # DEM geotransform and the source (WGS84) -> target (DEM SRS) transform.
    ulx, xres, xskew, uly, yskew, yres = dem.GetGeoTransform()
    target = osr.SpatialReference()
    t_res = target.ImportFromWkt(dem.GetProjection())  # return of 0 => OK
    if t_res != 0:
        assert False, "addGPXToMode(): target.ImportFromWkt() returned error" + str(
            t_res)
    source = osr.SpatialReference()
    s_res = source.ImportFromEPSG(4326)  # This is WGS84, return of 0 => OK
    if s_res != 0:
        assert False, "addGPXToMode(): source.ImportFromEPSG(4326) returned error" + str(
            s_res)

    # Fix: the coordinate transformation is loop invariant -- the original
    # rebuilt it for every single track point; build it once here.
    transform = osr.CoordinateTransformation(source, target)

    # Parse GPX file(s)
    for gpxFile in importedGPX:
        pr(f"process gpx file: {gpxFile}")
        # parse file for points and tracks
        tree = ET.parse(gpxFile)
        root = tree.getroot()
        points = root.find(
            '{http://www.topografix.com/GPX/1/1}trk/{http://www.topografix.com/GPX/1/1}trkseg'
        )
        tracks = root.findall(
            '{http://www.topografix.com/GPX/1/1}trk/{http://www.topografix.com/GPX/1/1}trkseg'
        )
        numTracks = len(tracks)
        pr(numTracks, "GPX tracks found")

        for trk in tracks:
            pr("Plotting track", numTracks)
            numTracks -= 1
            # Keep the last point so we can draw a line between points.
            lastPoint = None
            count = 0
            for trkpt in trk:
                count = count + 1
                gpx_lat = float(trkpt.attrib['lat'])
                gpx_lon = float(trkpt.attrib['lon'])
                # project WGS84 lat/lon into DEM raster pixel coordinates
                projectedPoints = transform.TransformPoint(gpx_lat, gpx_lon)
                rasterX = int((projectedPoints[1] - uly) / yres)
                rasterY = int((projectedPoints[0] - ulx) / xres)

                # Only process this point if it's in the model bounds.
                if rasterX >= 0 and rasterX < npim.shape[0] and rasterY >= 0 and rasterY < npim.shape[1]:
                    currentPoint = (rasterX, rasterY)
                    if lastPoint is not None:
                        # Cull GPX points that are too close together.
                        dist = math.sqrt((rasterX - lastPoint[0])**2 +
                                         (rasterY - lastPoint[1])**2)
                        # NOTE(review): 'points' is always the FIRST trkseg,
                        # so this end-of-track test uses its length even for
                        # later tracks -- confirm this is intended.
                        if dist >= gpxPixelsBetweenPoints or count == len(points) - 1:
                            # Draw the primary line between the two points
                            # using Bresenham's line algorithm.
                            plotLine(lastPoint[0], lastPoint[1],
                                     currentPoint[0], currentPoint[1],
                                     gpxPathHeight, npim, pathedPoints, 0)
                            # Create line thickness by stacking lines.
                            thicknessOffset = 1
                            for loopy in range(1, int(gpxPathThickness)):
                                plotLine(lastPoint[0], lastPoint[1],
                                         currentPoint[0], currentPoint[1],
                                         gpxPathHeight, npim, pathedPoints,
                                         thicknessOffset)
                                # Alternate sides of the line to draw on
                                # when adding thickness.
                                if (loopy % 2) == 0:
                                    thicknessOffset = (thicknessOffset * -1) + 1
                                else:
                                    thicknessOffset = thicknessOffset * -1
                            lastPoint = currentPoint
                    else:
                        lastPoint = currentPoint
                else:
                    # An out-of-bounds point invalidates lastPoint.
                    lastPoint = None

    gpxEndTime = time.time()
    gpxElapsedTime = gpxEndTime - gpxStartTime
    pr(f"Time to add GPX paths:{gpxElapsedTime}")
def gsv_pano_metadata_collector(samples_feature_class, num, output_text_folder):
    """
    This function is used to call the Google API url to collect the metadata
    of Google Street View Panoramas. The input of the function is the shapefile
    of the created sample sites, the output is the generated panoinfo metrics
    stored in text files.

    Parameters:
        samples_feature_class: the shapefile of the created sample sites
        num: the number of sites processed per batch
        output_text_folder: the output folder for the panoinfo
    """
    from urllib import request
    import xmltodict
    import ogr
    import osr
    import time
    import os.path
    import math

    if not os.path.exists(output_text_folder):
        os.makedirs(output_text_folder)

    driver = ogr.GetDriverByName('ESRI Shapefile')

    # Build the transformation from the shapefile's projection to WGS84
    # (Earth-centered, Earth-fixed terrestrial reference system).
    dataset = driver.Open(samples_feature_class)
    layer = dataset.GetLayer()
    source_proj = layer.GetSpatialRef()
    target_proj = osr.SpatialReference()
    target_proj.ImportFromEPSG(4326)
    transform = osr.CoordinateTransformation(source_proj, target_proj)

    # Process the features in batches of 'num' sites.
    feature_num = layer.GetFeatureCount()
    batch = int(math.ceil(feature_num / num))
    print(batch)

    for b in range(batch):
        # for each batch process num GSV sites
        start = b * num
        end = (b + 1) * num
        if end > feature_num:
            end = feature_num

        output_text_file = 'Pnt_start%s_end%s.txt' % (start, end)
        output_gsv_info_file = os.path.join(output_text_folder, output_text_file)
        # skip over existing txt files so an interrupted run can resume
        if os.path.exists(output_gsv_info_file):
            continue
        time.sleep(1)

        # Fix: the file is managed by the with statement; the original also
        # called panoInfoText.close() redundantly inside the block.
        with open(output_gsv_info_file, 'w') as panoInfoText:
            # process num features each time
            for i in range(start, end):
                feature = layer.GetFeature(i)
                geom = feature.GetGeometryRef()
                # transform the current projection of the input shapefile to WGS84
                geom.Transform(transform)
                # TODO check what is happening with axis order
                lon = geom.GetY()
                lat = geom.GetX()

                # get the metadata of panoramas at this location
                url_address = 'http://maps.google.com/cbk?output=xml&ll=%s,%s' % (lat, lon)
                time.sleep(0.05)
                # the output result of the metadata is an xml object
                meta_dataxml = request.urlopen(url_address)
                meta_data = meta_dataxml.read()
                data = xmltodict.parse(meta_data)

                # Fix: use .get() so a response without a <panorama> element
                # is skipped instead of raising KeyError.
                panorama = data.get('panorama')
                if panorama is None:
                    continue
                pano_info = panorama['data_properties']
                # NOTE(review): metadata is extracted by POSITION in the
                # attribute dict -- fragile against schema changes; confirm
                # the indices against the cbk XML attributes.
                pano_date = list(pano_info.items())[4][1]
                pano_id = list(pano_info.items())[5][1]
                pano_lat = list(pano_info.items())[8][1]
                pano_lon = list(pano_info.items())[9][1]
                print('The coordinate (%s,%s), panoId is: %s, panoDate is: %s' % (pano_lon, pano_lat, pano_id, pano_date))
                line_txt = 'panoID: %s panoDate: %s longitude: %s latitude: %s\n' % (pano_id, pano_date, pano_lon, pano_lat)
                panoInfoText.write(line_txt)
def fn_mask(vec, rast):
    """Mask a raster with the first polygon feature of a vector file.

    Args:
        vec: path to the vector data source (polygon or multipolygon layer).
        rast: path to the raster to mask.

    Returns:
        numpy masked array covering the feature's bounding-box window of the
        raster's first band, with pixels outside the polygon masked out.
    """
    shp = ogr.Open(vec)
    lyr = shp.GetLayer()
    feat = lyr.GetFeature(0)

    # Open data
    raster = gdal.Open(rast)
    shp = ogr.Open(vec)
    lyr = shp.GetLayer()

    # Get raster georeference info
    transform = raster.GetGeoTransform()
    x_origin = transform[0]
    y_origin = transform[3]
    pixel_width = transform[1]
    pixel_height = transform[5]

    # Project the feature geometry into the raster's SRS.
    source_sr = lyr.GetSpatialRef()
    target_sr = osr.SpatialReference()
    target_sr.ImportFromWkt(raster.GetProjectionRef())
    coord_trans = osr.CoordinateTransformation(source_sr, target_sr)
    geom = feat.GetGeometryRef()
    geom.Transform(coord_trans)

    # Collect the vertex coordinates to derive the feature's bounding box.
    geom = feat.GetGeometryRef()
    if geom.GetGeometryName() == 'MULTIPOLYGON':
        feat_count = 0
        points_x = []
        points_y = []
        for polygon in geom:
            geom_inner = geom.GetGeometryRef(feat_count)
            ring = geom_inner.GetGeometryRef(0)
            numpoints = ring.GetPointCount()
            for p in range(numpoints):
                lon, lat, z = ring.GetPoint(p)
                points_x.append(lon)
                points_y.append(lat)
            feat_count += 1
    elif geom.GetGeometryName() == 'POLYGON':
        ring = geom.GetGeometryRef(0)
        numpoints = ring.GetPointCount()
        points_x = []
        points_y = []
        for p in range(numpoints):
            lon, lat, z = ring.GetPoint(p)
            points_x.append(lon)
            points_y.append(lat)
    else:
        sys.exit("ERROR: Geometry needs to be either Polygon or Multipolygon")
    xmin = min(points_x)
    xmax = max(points_x)
    ymin = min(points_y)
    ymax = max(points_y)

    # Pixel offset and window size of the bounding box.
    # NOTE(review): yoff/ycount divide by pixel_width, assuming square
    # pixels -- confirm for rasters where pixel_width != |pixel_height|.
    xoff = int((xmin - x_origin) / pixel_width)
    yoff = int((y_origin - ymax) / pixel_width)
    xcount = int((xmax - xmin) / pixel_width) + 1
    ycount = int((ymax - ymin) / pixel_width) + 1

    # Create in-memory target raster for the rasterized zone mask.
    # Fix: the band count was previously omitted, so gdal.GDT_Byte (== 1)
    # was silently consumed as the band count; pass both explicitly.
    target_ds = gdal.GetDriverByName('MEM').Create('', xcount, ycount, 1, gdal.GDT_Byte)
    target_ds.SetGeoTransform((xmin, pixel_width, 0, ymax, 0, pixel_height,))
    # Give the target raster the same projection as the value raster.
    raster_srs = osr.SpatialReference()
    raster_srs.ImportFromWkt(raster.GetProjectionRef())
    target_ds.SetProjection(raster_srs.ExportToWkt())
    # Rasterize zone polygon to raster (burn 1 inside the polygon).
    gdal.RasterizeLayer(target_ds, [1], lyr, burn_values=[1])

    # Read the raster window and the mask as arrays.
    # Fix: np.float was removed in NumPy 1.24; the builtin float gives the
    # same float64 dtype.
    band_data_raster = raster.GetRasterBand(1)
    data_raster = band_data_raster.ReadAsArray(xoff, yoff, xcount, ycount).astype(float)
    band_mask = target_ds.GetRasterBand(1)
    data_mask = band_mask.ReadAsArray(0, 0, xcount, ycount).astype(float)

    # Mask everything outside the zone polygon.
    zone_raster = np.ma.masked_array(data_raster, np.logical_not(data_mask))
    return zone_raster
def _import(self, filename, crop=False):
    """Import a downloaded OSM layer file into PostGIS via OGR's PG driver.

    Ensures the target schema and the postgis/hstore extensions exist, then
    copies every sublayer of the OGR source into a table under
    self.schema_name, optionally spatially filtered to
    settings.OFFLINE_OSM_BBOX. Tables are overwritten unless the
    'no_overwrite' option is set and the table already exists.

    Args:
        filename: file name (relative to self.download_dir) to import.
        crop: when True, filter features to the configured bounding box.
    """
    self.stdout.write(
        'Importing {} to postgresql... This can take some time (over 10 minutes for large layers)'
        .format(filename))
    self.cursor.execute('CREATE SCHEMA IF NOT EXISTS ' + self.schema_name)
    self.cursor.execute('CREATE EXTENSION IF NOT EXISTS postgis')
    self.cursor.execute('CREATE EXTENSION IF NOT EXISTS hstore')
    db = settings.DATABASES['default']
    # OGR PG connection string built from the Django default database.
    connectionString = "PG:dbname='%s' host='%s' port='%s' user='******' password='******'" % (
        db['NAME'], db['HOST'], db['PORT'], db['USER'], db['PASSWORD'])
    ogrds = ogr.Open(connectionString)
    # NOTE(review): wgs84 is built but never used anywhere below -- dead code?
    wgs84 = osr.SpatialReference()
    wgs84.ImportFromEPSG(4326)
    # table_name = filename.split('/')[-1][:-4]
    table_name = 'offline_osm'
    ogr_source = ogr.Open(os.path.join(self.download_dir, filename))
    for ogr_layer in ogr_source:
        self.stdout.write('Importing sublayer {}'.format(
            ogr_layer.GetName()))
        if len(ogr_source) == 1:
            # NOTE(review): the first assignment is immediately overwritten,
            # and the schema prefix is applied AGAIN below, yielding
            # 'schema.schema.table' for single-layer sources -- confirm the
            # intended table naming.
            full_table_name = table_name
            full_table_name = self.schema_name + '.' + table_name
        else:
            full_table_name = table_name + '_' + ogr_layer.GetName()
        qulfd_table_name = self.schema_name + '.' + full_table_name
        cursor = self.cursor
        # Check whether the target table already exists (parameterized query).
        sql = 'SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_schema=%s AND table_name=%s)'
        cursor.execute(sql, [self.schema_name, full_table_name])
        layer_exists = cursor.fetchall()[0][0]
        if not layer_exists or not self.options['no_overwrite']:
            self.stdout.write(
                ' layer does not exists or no_overwrite unset, we import...'
            )
            if crop:
                # Build the configured bounding box as a WGS84 WKT polygon...
                bbox = settings.OFFLINE_OSM_BBOX
                wkt_string = 'POLYGON(({x1} {y1},{x2} {y1},{x2} {y2},{x1} {y2},{x1} {y1}))'.format(
                    x1=bbox[0][0], y1=bbox[0][1], x2=bbox[1][0], y2=bbox[1][1])
                bbox_geom = ogr.CreateGeometryFromWkt(wkt_string)
                # ...and reproject it into the layer's SRS when it differs
                # (falling back to 4326 when no authority code is found).
                ogr_layer_ref = int(
                    ogr_layer.GetSpatialRef().GetAuthorityCode("PROJCS")
                    or 4326)
                if ogr_layer_ref != 4326:
                    source, target = osr.SpatialReference(
                    ), osr.SpatialReference()
                    source.ImportFromEPSG(4326)
                    target.ImportFromEPSG(ogr_layer_ref)
                    transform = osr.CoordinateTransformation(
                        source, target)
                    bbox_geom.Transform(transform)
                ogr_layer.SetSpatialFilter(bbox_geom)
            # Stream-copy the (possibly filtered) layer into PostGIS,
            # mapping OSM's other_tags column to hstore.
            ogr_postgres_layer = ogrds.CopyLayer(
                ogr_layer, qulfd_table_name, [
                    'OGR_INTERLEAVED_READING=YES', 'OVERWRITE=YES',
                    'COLUMN_TYPES=other_tags=hstore'
                ])
        else:
            self.stdout.write(' layer already exists, we skip.')