def main(SOURCE_LINES, tempf, split_size, max_elements, progressslot=None, dtm=None, dtmThres=0):
    """Run geometry quality checks over line shapefiles.

    Explodes every input line into two-point segments ("collected.shp"),
    flags per-vertex problems (acute angles, duplicated consecutive nodes,
    vertex-height mismatch against a DTM raster), then scans tile by tile
    for pairwise problems (crossings, double lines, overlaps, near-misses)
    and writes them into "problems.shp"; tiles whose geometry count exceeds
    max_elements are dumped as polygons into "errors.shp".

    SOURCE_LINES -- path (or glob pattern) of the input line shapefile(s)
    tempf        -- working directory; receives collected/problems/errors .shp
    split_size   -- tile edge length for the pairwise scan; 0 = single pass
    max_elements -- per-tile geometry budget before the tile is marked as error
    progressslot -- optional Qt signal; emitted with percent progress (0..100)
    dtm          -- optional QGIS raster layer for vertex-height cross-checks
    dtmThres     -- tolerance for |vertex z - interpolated DTM z|

    NOTE(review): the source line structure was reconstructed; in particular
    the nesting level of the ``if dtm:`` block relative to the segment/angle
    checks should be confirmed against the original file.
    """
    driver = ogr.GetDriverByName("ESRI Shapefile")
    if not os.path.exists(tempf):
        os.mkdir(tempf)
    collected_file = os.path.join(tempf, "collected.shp")
    if os.path.exists(collected_file):
        driver.DeleteDataSource(collected_file)
    dsOutC = driver.CreateDataSource(collected_file)
    outLayerC = dsOutC.CreateLayer("collected", geom_type=ogr.wkbLineString)
    fDefC = outLayerC.GetLayerDefn()
    ang_list = []         # [2D point, angle in degrees] per acute vertex
    doublenode_list = []  # 2D points where two consecutive vertices coincide
    dtm_list = []         # [3D point, dtm_z - vertex_z] per height mismatch
    # if source.shp is not a file, use it as regexp pattern
    for input_file in glob.glob(SOURCE_LINES):
        dsIn = driver.Open(input_file, 0)  # 0 means read-only
        layerIn = dsIn.GetLayer()
        for feat in layerIn:
            geom_ref = feat.GetGeometryRef()
            if geom_ref is not None:
                for i in range(0, geom_ref.GetPointCount()):
                    if i < geom_ref.GetPointCount()-1:
                        # explode into a 2D two-point segment (drop z)
                        pt1 = list(geom_ref.GetPoint(i))
                        pt2 = list(geom_ref.GetPoint(i+1))
                        del pt1[2]
                        del pt2[2]
                        outGeom = ogr.Geometry(ogr.wkbLineString)
                        outGeom.AddPoint(*pt1)
                        outGeom.AddPoint(*pt2)
                        outFeat = ogr.Feature(fDefC)
                        outFeat.SetGeometry(outGeom)
                        outLayerC.CreateFeature(outFeat)
                        outFeat = None
                        if (pt1 == pt2):
                            # zero-length segment: duplicated consecutive node
                            doublenode_list.append(pt1)
                        if i > 0:
                            # angle at pt1 between the previous and next vertex
                            pt0 = list(geom_ref.GetPoint(i-1))
                            del pt0[2]
                            # NOTE(review): the comprehension variable reuses
                            # ``i``; harmless under Python 3 (scoped), but it
                            # would clobber the outer loop index under Python 2.
                            v10 = np.array([pt0[i]-pt1[i] for i in range(len(pt1))])
                            v12 = np.array([pt2[i]-pt1[i] for i in range(len(pt1))])
                            v10n = np.linalg.norm(v10)
                            v12n = np.linalg.norm(v12)
                            if v10n == 0 or v12n == 0:
                                # degenerate segment, angle undefined
                                continue
                            v10u = v10/v10n
                            v12u = v12/v12n
                            ang = np.arccos(np.clip(np.dot(v10u, v12u), -1.0, 1.0)) * 180/np.pi
                            if ang <= 90:  # acute angle
                                ang_list.append([pt1, ang])
                    if dtm:
                        # compare the vertex height with a bilinear sample of
                        # the DTM taken from the four surrounding pixel centers
                        pt = list(geom_ref.GetPoint(i))
                        rlayer = dtm
                        dx = rlayer.rasterUnitsPerPixelX()
                        dy = rlayer.rasterUnitsPerPixelY()
                        xpos = pt[0]
                        ypos = pt[1]
                        # assume pixel = center
                        xll = rlayer.extent().xMinimum() + 0.5 * dx
                        yll = rlayer.extent().yMinimum() + 0.5 * dy
                        xoffs = (pt[0] - xll) % dx
                        yoffs = (pt[1] - yll) % dy
                        dtm_val_ll = rlayer.dataProvider().identify(QgsPointXY(xpos - dx / 2, ypos - dy / 2), QgsRaster.IdentifyFormatValue).results()[1]
                        dtm_val_ur = rlayer.dataProvider().identify(QgsPointXY(xpos + dx / 2, ypos + dy / 2), QgsRaster.IdentifyFormatValue).results()[1]
                        dtm_val_lr = rlayer.dataProvider().identify(QgsPointXY(xpos + dx / 2, ypos - dy / 2), QgsRaster.IdentifyFormatValue).results()[1]
                        dtm_val_ul = rlayer.dataProvider().identify(QgsPointXY(xpos - dx / 2, ypos + dy / 2), QgsRaster.IdentifyFormatValue).results()[1]
                        # skip if any corner sample is missing (or zero)
                        if all([dtm_val_ll, dtm_val_lr, dtm_val_ul, dtm_val_ur]):
                            # bilinear interpolation coefficients
                            # NOTE(review): xoffs/yoffs are in map units, not
                            # normalized to [0,1] — confirm this is intended
                            # for the raster resolutions in use.
                            a00 = dtm_val_ll
                            a10 = dtm_val_lr - dtm_val_ll
                            a01 = dtm_val_ul - dtm_val_ll
                            a11 = dtm_val_ur + dtm_val_ll - (dtm_val_lr + dtm_val_ul)
                            dtm_bilinear = a00 + a10 * xoffs + a01 * yoffs + a11 * xoffs * yoffs
                            if abs(pt[2] - dtm_bilinear) > dtmThres:
                                # record each (x, y) location only once
                                addPoint = True
                                for (p, v) in dtm_list:
                                    if p[0] == pt[0] and p[1] == pt[1]:
                                        addPoint=False
                                        break
                                if addPoint:
                                    dtm_list.append([pt, (dtm_bilinear-pt[2])])
    # from here on work off the exploded segment file
    SOURCE_LINES = collected_file
    outLayerC = None
    dsOutC = None
    PROBLEMS_NAME = "problems.shp"
    ERROR_NAME = "errors.shp"
    if os.path.exists(os.path.join(tempf, PROBLEMS_NAME)):
        driver.DeleteDataSource(os.path.join(tempf, PROBLEMS_NAME))
    if os.path.exists(os.path.join(tempf, ERROR_NAME)):
        driver.DeleteDataSource(os.path.join(tempf, ERROR_NAME))
    # open output file for intersection points
    dsOutI = driver.CreateDataSource(os.path.join(tempf, PROBLEMS_NAME))
    outLayerI = dsOutI.CreateLayer(PROBLEMS_NAME[:-4], geom_type=ogr.wkbMultiPoint)
    type_field = ogr.FieldDefn("TYPE", ogr.OFTString)
    type_field.SetWidth(24)
    value_field = ogr.FieldDefn("VAL", ogr.OFTReal)
    value_field.SetPrecision(3)
    value_field.SetWidth(10)
    outLayerI.CreateField(type_field)
    outLayerI.CreateField(value_field)
    fDefI = outLayerI.GetLayerDefn()
    # open output file for error areas (>max_elements geoms)
    dsOutE = driver.CreateDataSource(os.path.join(tempf, ERROR_NAME))
    outLayerE = dsOutE.CreateLayer(ERROR_NAME[:-4], geom_type=ogr.wkbPolygon)
    fDefE = outLayerE.GetLayerDefn()
    # open input file
    dsIn = driver.Open(SOURCE_LINES, 0)  # 0 means read-only
    try:
        # best effort: speeds up the spatial filters below, but older GDAL
        # builds may not support the statement
        dsIn.ExecuteSQL("CREATE SPATIAL INDEX ON %s"%os.path.basename(SOURCE_LINES[:-4]))
    except:
        pass
    layerIn = dsIn.GetLayer()
    # build the tile polygons used as spatial filters; a single None entry
    # means "no filter" (whole layer in one pass)
    areas = []
    if split_size > 0:
        (xmin, xmax, ymin, ymax) = layerIn.GetExtent()
        xcurr = xmin
        i = 0
        while xcurr < xmax:
            ycurr = ymin
            while ycurr < ymax:
                i += 1
                rect = ogr.Geometry(ogr.wkbPolygon)
                ring = ogr.Geometry(ogr.wkbLinearRing)
                ring.AddPoint(xcurr, ycurr)
                ring.AddPoint(xcurr + split_size, ycurr)
                ring.AddPoint(xcurr + split_size, ycurr + split_size)
                ring.AddPoint(xcurr, ycurr + split_size)
                ring.AddPoint(xcurr, ycurr)
                rect.AddGeometry(ring)
                areas.append(rect)
                ycurr += split_size
            xcurr += split_size
    else:
        areas.append(None)
    # write the per-vertex findings gathered above
    for (pt, val) in ang_list:
        outFeat = ogr.Feature(fDefI)
        outFeat.SetField("TYPE", "angle")
        outFeat.SetField("VAL", val)
        geom = ogr.Geometry(ogr.wkbMultiPoint)
        pgeom = ogr.Geometry(ogr.wkbPoint)
        pgeom.AddPoint(*pt)
        geom.AddGeometry(pgeom)
        outFeat.SetGeometryDirectly(geom)
        outLayerI.CreateFeature(outFeat)
        outFeat = None
    for (pt, val) in dtm_list:
        outFeat = ogr.Feature(fDefI)
        outFeat.SetField("TYPE", "distance")
        outFeat.SetField("VAL", val)
        geom = ogr.Geometry(ogr.wkbMultiPoint)
        pgeom = ogr.Geometry(ogr.wkbPoint)
        pgeom.AddPoint(*pt)
        geom.AddGeometry(pgeom)
        outFeat.SetGeometryDirectly(geom)
        outLayerI.CreateFeature(outFeat)
        outFeat = None
    for pt in doublenode_list:
        outFeat = ogr.Feature(fDefI)
        outFeat.SetField("TYPE", "double node")
        geom = ogr.Geometry(ogr.wkbMultiPoint)
        pgeom = ogr.Geometry(ogr.wkbPoint)
        pgeom.AddPoint(*pt)
        geom.AddGeometry(pgeom)
        outFeat.SetGeometryDirectly(geom)
        outLayerI.CreateFeature(outFeat)
        outFeat = None
    # pairwise checks, tile by tile
    k = len(areas)
    counts = {
        'intersection': 0,
        'double lines': 0,
        'overlap': 0,
        'close': 0,
        'errors': 0,
    }
    for (j, area) in enumerate(areas):
        layerIn.ResetReading()
        layerIn.SetSpatialFilter(area)
        geometries = []
        if progressslot:
            progressslot.emit(j*100./k)
        for feat in layerIn:
            geomref = feat.GetGeometryRef()
            geometries.append(geomref.Clone())
        # compare every geometry against every later one (pop keeps pairs unique)
        while len(geometries) > 0:
            currgeom = geometries.pop(0)
            i = len(geometries)
            if i > max_elements:
                # tile too dense: record it as an error polygon and give up on it
                f = ogr.Feature(fDefE)
                f.SetGeometry(area)
                outLayerE.CreateFeature(f)
                f = None
                geometries = []
                counts['errors'] += 1
                continue
            for comparegeom in geometries:
                if currgeom.Intersects(comparegeom):
                    if currgeom.Crosses(comparegeom):
                        outFeat = ogr.Feature(fDefI)
                        outFeat.SetField("TYPE", "intersection")
                        outFeat.SetGeometry(ogr.ForceToMultiPoint(currgeom.Intersection(comparegeom)))
                        outLayerI.CreateFeature(outFeat)
                        outFeat = None
                        counts['intersection'] += 1
                    if currgeom.Within(comparegeom) or currgeom.Contains(comparegeom):  # order-dependent, complimentary
                        outFeat = ogr.Feature(fDefI)
                        outFeat.SetField("TYPE", "double line")
                        outFeat.SetGeometry(ogr.ForceToMultiPoint(currgeom.Centroid()))
                        outLayerI.CreateFeature(outFeat)
                        outFeat = None
                        counts['double lines'] += 1
                    if currgeom.Overlaps(comparegeom):
                        outFeat = ogr.Feature(fDefI)
                        outFeat.SetField("TYPE", "overlap")
                        outFeat.SetGeometry(ogr.ForceToMultiPoint(currgeom.Centroid()))
                        outLayerI.CreateFeature(outFeat)
                        outFeat = None
                        counts['overlap'] += 1
                # near-miss check must run even for disjoint geometries
                dist = currgeom.Distance(comparegeom)
                if 0 < dist < 0.02:
                    outFeat = ogr.Feature(fDefI)
                    outFeat.SetField("TYPE", "close")
                    outFeat.SetField("VAL", dist)
                    outFeat.SetGeometry(ogr.ForceToMultiPoint(currgeom.Centroid()))
                    outLayerI.CreateFeature(outFeat)
                    outFeat = None
                    counts['close'] += 1
def openOGR():
    """Demo: copy every geometry of a shapefile into a new '<name>_new.shp'.

    Geometries are rebuilt vertex by vertex (dropping Z). For MultiPolygon
    and MultiPoint the coordinates are recovered from the WKT text via the
    external helpers badPointExtruder()/secondBadPointExtruder(), because
    GetPoint() on those sub-geometries did not work with the bindings this
    was written against.

    Fixes over the original:
    - LineString copy no longer drops the last vertex (was range(points - 1))
    - destination path derived with os.path.splitext (safe with dotted dirs)
    - stale output sidecar files removed via glob/os.remove instead of
      shelling out to `find`/`rm`
    """
    # Source: http://nullege.com/codes/show/src%40v%40e%40vectorformats-0.1%40vectorformats%40Formats%40OGR.py/85/ogr.Geometry.AddPoint_2D/python
    source = "/media/Speicher/Daten/Testdaten/ne_110m_admin_0_countries.shp"
    ds = ogr.Open(source)

    # output next to the input; wipe any previous '<name>_new.*' sidecars
    destination = os.path.splitext(source)[0] + '_new.shp'
    if os.path.exists(destination):
        for leftover in glob.glob(os.path.splitext(destination)[0] + '.*'):
            os.remove(leftover)
        print("Had to delete")
    driverName = "ESRI Shapefile"
    drv = ogr.GetDriverByName(driverName)
    dn = drv.CreateDataSource(destination)

    if ds is None:
        print("Open failed.\n")
        return

    print("Number of Layers: " + str(ds.GetLayerCount()))
    for i in range(ds.GetLayerCount()):
        layer = ds.GetLayer(i)
        layer.ResetReading()
        print("Number of Features: " + str(layer.GetFeatureCount()))
        # the copied layer takes its geometry type from the first feature
        first_geom = layer.GetFeature(0).GetGeometryRef()
        print('type: ' + str(first_geom.GetGeometryType()))
        newLayer = dn.CreateLayer("layer" + str(i), None, first_geom.GetGeometryType())
        print(layer.GetExtent())
        for index in range(layer.GetFeatureCount()):
            feature = layer.GetFeature(index)
            geometry = feature.GetGeometryRef()
            gtype = geometry.GetGeometryType()
            geometryN = ogr.Geometry(type=gtype)
            f = ogr.Feature(feature_def=newLayer.GetLayerDefn())
            if gtype == ogr.wkbMultiPolygon:
                # all member rings collected into one polygon, as the original did
                gpoly = ogr.Geometry(type=ogr.wkbPolygon)
                for x in range(geometry.GetGeometryCount()):
                    ring = geometry.GetGeometryRef(x)
                    # GetPoint()/GetPointCount() fail on these sub-geometries
                    # with the old bindings, so parse the WKT text instead
                    points = badPointExtruder(str(ring))
                    gring = ogr.Geometry(type=ogr.wkbLinearRing)
                    for p in range(len(points[0])):
                        gring.AddPoint_2D(float(points[0][p]), float(points[1][p]))
                    gpoly.AddGeometry(gring)
                geometryN.AddGeometry(gpoly)
            elif gtype == ogr.wkbPolygon:
                # exterior ring only, matching the original behaviour
                ring = geometry.GetGeometryRef(0)
                gring = ogr.Geometry(type=ogr.wkbLinearRing)
                for p in range(ring.GetPointCount()):
                    lon, lat, z = ring.GetPoint(p)
                    gring.AddPoint_2D(lon, lat)
                geometryN.AddGeometry(gring)
            elif gtype == ogr.wkbPoint:
                lon, lat, z = geometry.GetPoint()
                geometryN.AddPoint_2D(lon, lat)
            elif gtype == ogr.wkbMultiPoint:
                # same WKT-parsing workaround as MultiPolygon
                points = secondBadPointExtruder(str(geometry))
                for p in range(len(points[0])):
                    gpt = ogr.Geometry(type=ogr.wkbPoint)
                    gpt.AddPoint_2D(float(points[0][p]), float(points[1][p]))
                    geometryN.AddGeometry(gpt)
            elif gtype == ogr.wkbLineString:
                # BUG FIX: the original looped range(points - 1) and silently
                # dropped the last vertex of every line
                for p in range(geometry.GetPointCount()):
                    lon, lat, z = geometry.GetPoint(p)
                    geometryN.AddPoint_2D(lon, lat)
            elif gtype == ogr.wkbMultiLineString:
                for y in range(geometry.GetGeometryCount()):
                    ring = geometry.GetGeometryRef(y)
                    gring = ogr.Geometry(type=ogr.wkbLineString)
                    for p in range(ring.GetPointCount()):
                        lon, lat, z = ring.GetPoint(p)
                        gring.AddPoint_2D(lon, lat)
                    geometryN.AddGeometry(gring)
            f.SetGeometry(geometryN)
            newLayer.CreateFeature(f)
def checkextension(self, path): """ Descrpition: check the gdb """ try: logs_text = self.conf_param['logsText']["GDB"] open_file_GDB = ogr.GetDriverByName("OpenFileGDB") self.ds = open_file_GDB.Open(path, 0) if self.ds != None: n = 0 layer_dictionary = {} field_name_array = [] while n < self.ds.GetLayerCount(): name = self.ds.GetLayer(n).GetName() if not CommonFunctions.checkLayerNomenclature( self, self.activ_code, self.root, name): _nameCorrect = False err = self.conf_param['logsText']['namingconvention'][ 'incorrect'].copy() initial_err = self.activ_code + "|" + CommonFunctions.split_root( self, self.root, self.activ_code ) + "|" + name + "|" + self.logFile.getCatValue( self.conf_param['logsText']['namingconvention'] ) + "|" + self.logFile.getIssueValue( self.conf_param['logsText'] ['namingconvention']) + "|" err.insert(0, initial_err) self.logFile.writelogs(err) aoi = None layer_dictionary[name] = { 'GeometryType': self.ds.GetLayer(n).GetGeomType(), 'LayerObject': self.ds.GetLayer(n), } layer = self.ds.GetLayer(n).GetLayerDefn() for d in range(layer.GetFieldCount()): field_name_array.append( layer.GetFieldDefn(d).GetName()) self._checkattributes(layer, field_name_array) layer_dictionary[name]["Field"] = field_name_array n += 1 #print (layer_dictionary) #print ("a") self.checkgeometry(layer_dictionary) else: inital_text_error = self.activ_code + "|" + CommonFunctions.split_root( self, self.root, self.activ_code ) + "|" + name + "|" + self.logFile.getCatValue( self.conf_param['VectorFormats'][ self.type]) + "|" + self.logFile.getIssueValue( self.conf_param['VectorFormats'][self.type]) + "|" geojson = logs_text["extension"]["NoExist"] geojson.insert(0, inital_text_error) self.logFile.writelogs(geojson) geojson.pop(0) except Exception as ex: e = self.conf_param['logsText']["KML"]["extension"]["keyError"] inital_text_error = self.activ_code + "|" + CommonFunctions.split_root( self, self.root, self.activ_code) + "|" + name + "|" + self.logFile.getCatValue( 
self.conf_param['VectorFormats'][ self.type]) + "|" + self.logFile.getIssueValue( self.conf_param['VectorFormats'][self.type]) + "|" e.insert(0, inital_text_error) e.insert(1, str(ex)) self.logFile.writelogs(e) e.pop(1) e.pop(1)
def build_shp(contribs, shp, args, params):
    """Write a shapefile of image footprints with per-image metadata.

    contribs -- iterable of (iinfo, geom) pairs: image info object + footprint
    shp      -- path of the shapefile to (re)create
    args     -- run options; args.calc_stats adds the STATS_* fields
    params   -- mosaic params; params.proj (WKT) sets the layer SRS and
                params.median_remove adds the MEDIAN field

    Exits the process (sys.exit(-1)) if the driver, data source, or layer
    cannot be created.
    """
    logger.info("Creating shapefile of image boundaries: %s", shp)
    # (field name, OGR field type, width) -- width is applied to string fields only
    fields = (
        ("IMAGENAME", ogr.OFTString, 100),
        ("SENSOR", ogr.OFTString, 10),
        ("ACQDATE", ogr.OFTString, 10),
        ("CAT_ID", ogr.OFTString, 30),
        ("RESOLUTION", ogr.OFTReal, 0),
        ("OFF_NADIR", ogr.OFTReal, 0),
        ("SUN_ELEV", ogr.OFTReal, 0),
        ("SUN_AZ", ogr.OFTReal, 0),
        ("SAT_ELEV", ogr.OFTReal, 0),
        ("SAT_AZ", ogr.OFTReal, 0),
        ("CLOUDCOVER", ogr.OFTReal, 0),
        ("TDI", ogr.OFTReal, 0),
        ("DATE_DIFF", ogr.OFTReal, 0),
        ("SCORE", ogr.OFTReal, 0),
    )
    # optional per-band statistics fields (comma-joined strings, one per band)
    if args.calc_stats is True:
        fields = fields + (
            ("STATS_MIN", ogr.OFTString, 80),
            ("STATS_MAX", ogr.OFTString, 80),
            ("STATS_STD", ogr.OFTString, 80),
            ("STATS_MEAN", ogr.OFTString, 80),
            ("STATS_PXCT", ogr.OFTString, 80)
        )
    if params.median_remove is True:
        fields = fields + (
            ("MEDIAN", ogr.OFTString, 80),
        )
    OGR_DRIVER = "ESRI Shapefile"
    ogrDriver = ogr.GetDriverByName(OGR_DRIVER)
    if ogrDriver is None:
        logger.info("OGR: Driver %s is not available", OGR_DRIVER)
        sys.exit(-1)
    # recreate the shapefile from scratch
    if os.path.isfile(shp):
        ogrDriver.DeleteDataSource(shp)
    vds = ogrDriver.CreateDataSource(shp)
    if vds is None:
        logger.info("Could not create shp")
        sys.exit(-1)
    # layer name = shapefile basename without extension
    shpd, shpn = os.path.split(shp)
    shpbn, shpe = os.path.splitext(shpn)
    rp = utils.osr_srs_preserve_axis_order(osr.SpatialReference())
    rp.ImportFromWkt(params.proj)
    lyr = vds.CreateLayer(shpbn, rp, ogr.wkbPolygon)
    if lyr is None:
        logger.info("ERROR: Failed to create layer: %s", shpbn)
        sys.exit(-1)
    for fld, fdef, flen in fields:
        field_defn = ogr.FieldDefn(fld, fdef)
        if fdef == ogr.OFTString:
            field_defn.SetWidth(flen)
        if lyr.CreateField(field_defn) != 0:
            logger.info("ERROR: Failed to create field: %s", fld)
    # one feature per contributing image
    for iinfo, geom in contribs:
        feat = ogr.Feature(lyr.GetLayerDefn())
        feat.SetField("IMAGENAME", iinfo.srcfn)
        feat.SetField("SENSOR", iinfo.sensor)
        feat.SetField("ACQDATE", iinfo.acqdate.strftime("%Y-%m-%d"))
        feat.SetField("CAT_ID", iinfo.catid)
        feat.SetField("OFF_NADIR", iinfo.ona)
        feat.SetField("SUN_ELEV", iinfo.sunel)
        feat.SetField("SUN_AZ", iinfo.sunaz)
        feat.SetField("SAT_ELEV", iinfo.satel)
        feat.SetField("SAT_AZ", iinfo.sataz)
        feat.SetField("CLOUDCOVER", iinfo.cloudcover)
        feat.SetField("SCORE", iinfo.score)
        # sentinel defaults where the source value is missing/falsy
        tdi = iinfo.tdi if iinfo.tdi else 0
        feat.SetField("TDI", tdi)
        date_diff = iinfo.date_diff if iinfo.date_diff else -9999
        feat.SetField("DATE_DIFF", date_diff)
        # mean of x/y ground resolution; 0 when unknown
        res = ((iinfo.xres+iinfo.yres)/2.0) if iinfo.xres else 0
        feat.SetField("RESOLUTION", res)
        if args.calc_stats:
            if len(iinfo.stat_dct) > 0:
                # per-band stats serialized as comma-joined strings in band order
                min_list = []
                max_list = []
                mean_list = []
                stdev_list = []
                px_cnt_list = []
                keys = list(iinfo.stat_dct.keys())
                keys.sort()
                for band in keys:
                    imin, imax, imean, istdev = iinfo.stat_dct[band]
                    ipx_cnt = iinfo.datapixelcount_dct[band]
                    min_list.append(str(imin))
                    max_list.append(str(imax))
                    mean_list.append(str(imean))
                    stdev_list.append(str(istdev))
                    px_cnt_list.append(str(ipx_cnt))
                feat.SetField("STATS_MIN", ",".join(min_list))
                feat.SetField("STATS_MAX", ",".join(max_list))
                feat.SetField("STATS_MEAN", ",".join(mean_list))
                feat.SetField("STATS_STD", ",".join(stdev_list))
                feat.SetField("STATS_PXCT", ",".join(px_cnt_list))
        if params.median_remove is True:
            keys = list(iinfo.median.keys())
            keys.sort()
            median_list = [str(iinfo.median[band]) for band in keys]
            feat.SetField("MEDIAN", ",".join(median_list))
            #logger.info("median = %s", ",".join(median_list))
        feat.SetGeometry(geom)
        if lyr.CreateFeature(feat) != 0:
            logger.info("ERROR: Could not create feature for image %s", iinfo.srcfn)
        feat.Destroy()
def ogr_vrt_14():
    """Regression test: a VRT layer's <SrcRegion> must pre-filter features.

    Builds tmp/test.shp with four points (three at (-10 49), one at (2 49)),
    wraps it in a VRT whose SrcRegion covers only the (2 49) point, and
    verifies fast spatial filtering, extent, feature count, FID, geometry,
    and the interaction of SrcRegion with additional rect filters.
    Returns 'success', 'fail' or 'skip' (gdaltest convention).
    """
    if gdaltest.vrt_ds is None:
        return 'skip'
    # remove any leftover shapefile; errors are expected when it doesn't exist
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    try:
        ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test.shp')
    except:
        pass
    gdal.PopErrorHandler()
    shp_ds = ogr.GetDriverByName('ESRI Shapefile').CreateDataSource(
        'tmp/test.shp')
    shp_lyr = shp_ds.CreateLayer('test')
    # FIDs 0, 1, 3 fall outside the SrcRegion; FID 2 is inside
    feat = ogr.Feature(shp_lyr.GetLayerDefn())
    geom = ogr.CreateGeometryFromWkt('POINT (-10 49)')
    feat.SetGeometryDirectly(geom)
    shp_lyr.CreateFeature(feat)
    feat.Destroy()
    feat = ogr.Feature(shp_lyr.GetLayerDefn())
    geom = ogr.CreateGeometryFromWkt('POINT (-10 49)')
    feat.SetGeometryDirectly(geom)
    shp_lyr.CreateFeature(feat)
    feat.Destroy()
    feat = ogr.Feature(shp_lyr.GetLayerDefn())
    geom = ogr.CreateGeometryFromWkt('POINT (2 49)')
    feat.SetGeometryDirectly(geom)
    shp_lyr.CreateFeature(feat)
    feat.Destroy()
    feat = ogr.Feature(shp_lyr.GetLayerDefn())
    geom = ogr.CreateGeometryFromWkt('POINT (-10 49)')
    feat.SetGeometryDirectly(geom)
    shp_lyr.CreateFeature(feat)
    feat.Destroy()
    # the spatial index is what enables OLCFastSpatialFilter below
    shp_ds.ExecuteSQL('CREATE SPATIAL INDEX on test')
    shp_ds.Destroy()
    # VRT restricted to a region that contains only POINT (2 49)
    vrt_xml = """
<OGRVRTDataSource>
    <OGRVRTLayer name="test">
        <SrcDataSource relativeToVRT="0">tmp/test.shp</SrcDataSource>
        <SrcLayer>test</SrcLayer>
        <SrcRegion>POLYGON((0 40,0 50,10 50,10 40,0 40))</SrcRegion>
    </OGRVRTLayer>
</OGRVRTDataSource>"""
    vrt_ds = ogr.Open(vrt_xml)
    vrt_lyr = vrt_ds.GetLayerByName('test')
    if vrt_lyr.TestCapability(ogr.OLCFastSpatialFilter) != 1:
        gdaltest.post_reason('Fast filter not set.')
        return 'fail'
    # extent must reflect only the in-region feature
    extent = vrt_lyr.GetExtent()
    if extent != (2.0, 2.0, 49.0, 49.0):
        gdaltest.post_reason('wrong extent')
        print(extent)
        return 'fail'
    if vrt_lyr.GetFeatureCount() != 1:
        gdaltest.post_reason('Feature count not one as expected.')
        return 'fail'
    feat = vrt_lyr.GetNextFeature()
    if feat.GetFID() != 2:
        gdaltest.post_reason('did not get fid 2.')
        return 'fail'
    geom = feat.GetGeometryRef()
    if geom.ExportToWkt() != 'POINT (2 49)':
        gdaltest.post_reason('did not get expected point geometry.')
        return 'fail'
    feat.Destroy()
    # rect filter intersecting the region still yields the one feature
    vrt_lyr.SetSpatialFilterRect(1, 41, 3, 49.5)
    if vrt_lyr.GetFeatureCount() != 1:
        # builds without GEOS cannot evaluate SrcRegion against the filter
        if gdal.GetLastErrorMsg().find('GEOS support not enabled') != -1:
            ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource(
                'tmp/test.shp')
            return 'skip'
        print(vrt_lyr.GetFeatureCount())
        gdaltest.post_reason('did not get one feature on rect spatial filter.')
        return 'fail'
    # rect filter excluding (2 49) yields nothing
    vrt_lyr.SetSpatialFilterRect(1, 41, 3, 48.5)
    if vrt_lyr.GetFeatureCount() != 0:
        gdaltest.post_reason('Did not get expected zero feature count.')
        return 'fail'
    # clearing the filter restores the SrcRegion-limited count
    vrt_lyr.SetSpatialFilter(None)
    if vrt_lyr.GetFeatureCount() != 1:
        gdaltest.post_reason(
            'Did not get expected one feature count with no filter.')
        return 'fail'
    vrt_ds.Destroy()
    vrt_ds = None
    # clean up the temporary shapefile
    ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test.shp')
    return 'success'
def main():
    """Tag road/rail features and write buffered polygons.

    For every feature of the input layer this sets three new attributes —
    "layer" (rendering layer number), "brdtun" (bridge/tunnel code) and
    "bwidth" (buffer width in map units) — deletes features whose category
    cannot be classified, and writes a buffered copy of each remaining
    feature (with all attributes) into a second data source under
    bufferDir/country/.

    Reads module-level configuration: ``type`` ("rail" or "road"),
    ``inputPath``, ``bufferDir`` and ``country``.

    Fixes over the original: the input is opened once instead of three
    times, the duplicated FieldDefn("bwidth") is gone, the huge elif chains
    are dict lookups, and the duplicated buffer-writing code is a helper.
    Classification results and feature deletions are unchanged.
    """
    counter = 0
    if (type == "rail"):
        field = "railway"
    elif (type == "road"):
        field = "category"
    # NOTE: any other ``type`` leaves ``field`` unbound and fails below,
    # exactly as the original did.

    extension = os.path.splitext(inputPath)[1]
    if (extension == ".shp"):
        driver = ogr.GetDriverByName("ESRI Shapefile")
    elif (extension == ".sqlite"):
        driver = ogr.GetDriverByName("SQLite")

    # open the source once for update and add the three new attribute fields
    dataSource = driver.Open(inputPath, 1)
    layer = dataSource.GetLayer()
    layer.CreateField(ogr.FieldDefn("bwidth", ogr.OFTReal))
    layer.CreateField(ogr.FieldDefn("layer", ogr.OFTInteger))
    layer.CreateField(ogr.FieldDefn("brdtun", ogr.OFTInteger))
    inLayerDefn = layer.GetLayerDefn()
    featureCount = layer.GetFeatureCount()
    sref = layer.GetSpatialRef()

    if not os.path.exists(bufferDir + "/" + country):
        os.makedirs(bufferDir + "/" + country)
    fn = os.path.splitext(os.path.basename(inputPath))[0]
    bufferedFilePath = bufferDir + "/" + country + "/" + fn + extension
    bufferedFile = driver.CreateDataSource(bufferedFilePath)
    bufferedLayer = bufferedFile.CreateLayer(bufferedFilePath, sref,
                                             geom_type=ogr.wkbPolygon)
    bufferedFeaturedfn = bufferedLayer.GetLayerDefn()
    # mirror all input fields (including the three new ones) on the buffer layer
    for i in range(0, inLayerDefn.GetFieldCount()):
        bufferedLayer.CreateField(inLayerDefn.GetFieldDefn(i))

    def _write_buffer(feature, width):
        # buffer the geometry and copy every attribute into the buffer layer
        outFeature = ogr.Feature(bufferedFeaturedfn)
        outFeature.SetGeometry(feature.GetGeometryRef().Buffer(width))
        for i in range(0, bufferedFeaturedfn.GetFieldCount()):
            outFeature.SetField(
                bufferedFeaturedfn.GetFieldDefn(i).GetNameRef(),
                feature.GetField(i))
        bufferedLayer.CreateFeature(outFeature)

    if (field == "category"):
        # highway category -> rendering layer (motorway/motorway_link are
        # handled separately because their layer depends on "bridge")
        road_layers = {
            "bridleway": 24, "construction": 27, "cycleway": 23,
            "footway": 22, "living_street": 13, "path": 21,
            "pedestrian": 26, "platform": 32, "primary": 3,
            "primary_link": 4, "raceway": 28, "residential": 12,
            "rest_area": 29, "road": 30, "secondary": 7,
            "secondary_link": 8, "service": 14, "services": 31,
            "steps": 25, "tertiary": 9, "tertiary_link": 10,
            "track_1": 15, "track_2": 16, "track_3": 17, "track_4": 18,
            "track_5": 19, "track_na": 20, "trunk": 5, "trunk_link": 6,
            "unclassified": 11,
        }
        # highway category -> buffer width (map units)
        road_widths = {
            "motorway": 13.6, "motorway_link": 6.5, "primary": 6.0,
            "primary_link": 5.5, "trunk": 9.6, "trunk_link": 6.5,
            "secondary": 5.3, "secondary_link": 5.1, "tertiary": 4.9,
            "tertiary_link": 4.5, "unclassified": 4.5, "residential": 4.5,
            "living_street": 4.5, "service": 2.5, "track_1": 2.5,
            "track_2": 2.5, "track_3": 2, "track_4": 2, "track_5": 2,
            "track_na": 2, "path": 1, "footway": 1.8, "cycleway": 1.5,
            "bridleway": 1.5, "steps": 1.5, "pedestrian": 4,
            "construction": 3, "raceway": 3.6, "rest_area": 6, "road": 4,
            "services": 6, "platform": 1.8, "motorway on brid": 13.6,
            "motorway_link on bridge": 6.5,
        }
        for feature in layer:
            counter = counter + 1
            print(str(counter) + " / " + str(featureCount))
            cat = feature.GetField(field)
            bridge = feature.GetField("bridge")
            # bridge/tunnel code: 3 = motorway(-link) bridge, 1 = other
            # bridge, 2 = tunnel, 4 = plain road
            if bridge is not None:
                feature.SetField(
                    "brdtun",
                    3 if cat in ("motorway", "motorway_link") else 1)
            elif feature.GetField("tunnel") == "yes":
                feature.SetField("brdtun", 2)
            else:
                feature.SetField("brdtun", 4)
            # rendering layer number (99 = unknown category)
            if cat == "motorway":
                lyr_no = 33 if bridge is not None else 1
            elif cat == "motorway_link":
                lyr_no = 34 if bridge is not None else 2
            else:
                lyr_no = road_layers.get(cat, 99)
            # any non-motorway bridge goes on its own layer
            if bridge is not None and cat not in ("motorway", "motorway_link"):
                lyr_no = 35
            feature.SetField("layer", lyr_no)
            # drop features with no category or an unknown one (layer 99)
            if cat is None or lyr_no > 35:
                layer.DeleteFeature(feature.GetFID())
                continue
            layer.SetFeature(feature)
            if cat not in road_widths:
                # unclassifiable width: keep the tagged feature, skip buffering
                continue
            bfwidth = road_widths[cat]
            feature.SetField("bwidth", bfwidth)
            layer.SetFeature(feature)
            # This creates a buffer feature
            if (feature.GetField("layer") < 36 and bfwidth < 100):
                _write_buffer(feature, bfwidth)

    if (field == "railway"):
        # railway category -> rendering layer ("subway" handled separately)
        rail_layers = {
            "abandoned": 2, "construction": 10, "disused": 3,
            "funicular": 13, "light_rail": 5, "miniature": 15,
            "monorail": 14, "narrow_gauge": 7, "preserved": 8, "rail": 1,
            "tram": 4, "platform": 9,
        }
        # railway category -> buffer width (map units)
        rail_widths = {
            "rail": 6, "abandoned": 2, "disused": 2, "tram": 3.5,
            "light_rail": 3.5, "subway": 5, "narrow_gauge": 3.5,
            "preserved": 3.5, "platform": 2, "construction": 8.5,
            "miniature": 2, "monorail": 3.5, "funicular": 3.5,
        }
        for feature in layer:
            counter = counter + 1
            print(str(counter) + " / " + str(featureCount))
            cat = feature.GetField(field)
            bridge = feature.GetField("bridge")
            tunnel = feature.GetField("tunnel")
            # bridge/tunnel code: 1 = bridge, 2 = tunnel, 3 = plain track
            if bridge is not None:
                feature.SetField("brdtun", 1)
            elif tunnel == "yes":
                feature.SetField("brdtun", 2)
            else:
                feature.SetField("brdtun", 3)
            if cat == "subway":
                # subway layer depends on the tunnel/bridge combination;
                # combinations not matched below were layer 99 originally too
                if tunnel == "yes":
                    lyr_no = 6
                elif tunnel in ("no", None) and bridge in ("yes", "viaduct"):
                    lyr_no = 11
                elif tunnel is None and bridge is None:
                    lyr_no = 12
                else:
                    lyr_no = 99
            else:
                lyr_no = rail_layers.get(cat, 99)
            feature.SetField("layer", lyr_no)
            # drop features with no category or an unknown one (layer 99)
            if cat is None or lyr_no > 15:
                layer.DeleteFeature(feature.GetFID())
                continue
            layer.SetFeature(feature)
            if cat not in rail_widths:
                continue
            bfwidth = rail_widths[cat]
            feature.SetField("bwidth", bfwidth)
            layer.SetFeature(feature)
            # This creates a buffer feature (rail buffers unconditionally,
            # matching the original)
            _write_buffer(feature, bfwidth)

    dataSource.Destroy()
    dataSource = None
    bufferedFile.Destroy()
    bufferedFile = None
def reproject_coordinate_system(original_layer_name, layer_name, in_shp_layer,
                                layer_path):
    """Reproject *in_shp_layer* to EPSG:4326 and replace the original shapefile.

    Writes ``<layer_path>/<layer_name>_reproj.*``, then deletes every file
    belonging to *original_layer_name* and renames the reprojected set to that
    name.

    :param original_layer_name: base name of the shapefile set to replace.
    :param layer_name: working name used for the temporary ``_reproj`` files.
    :param in_shp_layer: source ``ogr.Layer`` (must have a valid SRS).
    :param layer_path: directory holding the shapefile components.
    :return: authority string of the output SRS, e.g. ``"EPSG:4326"``.
    """

    def get_geometry_type(geometry_name):
        # Map OGR geometry names to wkb constants; returns None for names not
        # in the table (the layer is then created with geom_type=None).
        switcher = {
            "POINT": ogr.wkbPoint,
            "MULTIPOINT": ogr.wkbMultiPoint,
            "LINESTRING": ogr.wkbLineString,
            "MULTILINESTRING": ogr.wkbMultiLineString,
            "POLYGON": ogr.wkbPolygon,
            "MULTIPOLYGON": ogr.wkbMultiPolygon
        }
        return switcher.get(geometry_name)

    shp_driver = ogr.GetDriverByName('ESRI Shapefile')
    # input SpatialReference
    input_srs = in_shp_layer.GetSpatialRef()
    # output SpatialReference
    output_srs = osr.SpatialReference()
    output_srs.ImportFromEPSG(4326)
    # create the CoordinateTransformation
    coord_trans = osr.CoordinateTransformation(input_srs, output_srs)

    # create the output layer; remove a stale output file first
    output_shp_file = '{}/{}_reproj.shp'.format(layer_path, layer_name)
    if os.path.exists(output_shp_file):
        shp_driver.DeleteDataSource(output_shp_file)
    output_shp_dataset = shp_driver.CreateDataSource(output_shp_file)

    # Determine the geometry type from the first feature, then rewind the
    # cursor so the copy loop below starts from the beginning again.
    feature = in_shp_layer.GetNextFeature()
    geometry = feature.GetGeometryRef()
    geometry_type = get_geometry_type(geometry.GetGeometryName())
    output_shp_layer = output_shp_dataset.CreateLayer(
        '{}_4326'.format(layer_name), output_srs, geometry_type)
    in_shp_layer.ResetReading()

    # replicate the attribute schema of the input layer
    in_layer_def = in_shp_layer.GetLayerDefn()
    for i in range(0, in_layer_def.GetFieldCount()):
        output_shp_layer.CreateField(in_layer_def.GetFieldDefn(i))

    # copy every feature, transforming its geometry in place
    output_layer_def = output_shp_layer.GetLayerDefn()
    in_feature = in_shp_layer.GetNextFeature()
    while in_feature:
        geom = in_feature.GetGeometryRef()
        geom.Transform(coord_trans)
        output_feature = ogr.Feature(output_layer_def)
        output_feature.SetGeometry(geom)
        for i in range(0, output_layer_def.GetFieldCount()):
            output_feature.SetField(
                output_layer_def.GetFieldDefn(i).GetNameRef(),
                in_feature.GetField(i))
        output_shp_layer.CreateFeature(output_feature)
        output_feature.Destroy()
        in_feature.Destroy()
        in_feature = in_shp_layer.GetNextFeature()
    # close the output shapefile (flushes to disk)
    output_shp_dataset.Destroy()

    # Write the .prj companion file. FIX: use a context manager so the handle
    # is closed even if ExportToWkt()/write() raises (original used a bare
    # open/close pair).
    spatialRef = osr.SpatialReference()
    spatialRef.ImportFromEPSG(4326)
    spatialRef.MorphToESRI()
    with open('{}/{}_reproj.prj'.format(layer_path, layer_name), 'w') as prj_file:
        prj_file.write(spatialRef.ExportToWkt())

    # Replace the original shapefile set with the reprojected one.
    for file_name in os.listdir(layer_path):
        if os.path.splitext(file_name)[0] == original_layer_name:
            os.remove(os.path.join(layer_path, file_name))
    for file_name in os.listdir(layer_path):
        if os.path.splitext(file_name)[0] == '{}_reproj'.format(layer_name):
            extension = os.path.splitext(file_name)[1][1:].strip().lower()
            os.rename(
                os.path.join(layer_path, file_name),
                os.path.join(layer_path,
                             '{}.{}'.format(original_layer_name, extension)))
    return '{0}:{1}'.format(output_srs.GetAuthorityName(None),
                            output_srs.GetAuthorityCode(None))
def polygonize():
    """Polygonize the temporary GDAL raster into a shapefile, then split the
    result into chunk shapefiles of at most ``chunksize`` features each.

    Updates the module globals ``currentchunk`` and ``totalsubsets`` and reads
    ``dir_base_name``, ``base_name`` and ``chunksize``. FIX: print statements
    converted to py2/py3-compatible ``print()`` calls (other parts of this file
    already use Python-3 print); guarded ``outDS`` so an empty input layer no
    longer raises UnboundLocalError on the final ``Destroy()``.
    """
    global currentchunk
    global totalsubsets
    global base_name
    currentchunk = 0
    totalsubsets = 0
    outputgdal = dir_base_name + "-gdal-tmp.tif"

    # QGIS POLYGONIZE
    print("")
    print("Polygonizing (coarse):")
    print("----------------------")
    shapefile = dir_base_name + '.shp'
    # only run gdal_polygonize if the shapefile is not already present
    if not os.path.isfile(shapefile):
        command = 'gdal_polygonize.py ' + outputgdal + ' -f "ESRI Shapefile" ' + shapefile + ' ' + base_name
        logging.debug(command)
        os.system(command)

    # Split resulting megapolygon file into smaller chunks
    # most code from: http://cosmicproject.org/OGR/cris_example_write.html
    print("")
    print("Splitting megapolygon file into chunks")
    print("--------------------------------------")

    # get the shapefile driver and open the input data source read-only
    driver = ogr.GetDriverByName('ESRI Shapefile')
    inDS = driver.Open(shapefile, 0)
    if inDS is None:
        print('Could not open shapefile')
        sys.exit(1)
    inLayer = inDS.GetLayer()

    # FieldDefn of the 'DN' attribute, replicated into every chunk file
    feature = inLayer.GetFeature(0)
    idFieldDefn = feature.GetFieldDefnRef('DN')

    outDS = None  # BUG FIX: previously unbound when the layer had no features
    inFeature = inLayer.GetNextFeature()
    while inFeature:
        if currentchunk == 0 or currentchunk >= chunksize:
            currentchunk = 0
            totalsubsets = totalsubsets + 1
            # start a new temp chunk file
            fn = dir_base_name + '-tmp-' + str(totalsubsets) + '.shp'
            if os.path.exists(fn):
                driver.DeleteDataSource(fn)
            outDS = driver.CreateDataSource(fn)
            if outDS is None:
                print('Could not create temp shapefile')
                sys.exit(1)
            outLayer = outDS.CreateLayer(base_name, geom_type=ogr.wkbPolygon)
            # create the DN field in the output shapefile
            outLayer.CreateField(idFieldDefn)
            featureDefn = outLayer.GetLayerDefn()
        # copy geometry and DN attribute into the current chunk
        outFeature = ogr.Feature(featureDefn)
        geom = inFeature.GetGeometryRef()
        outFeature.SetGeometry(geom)
        DN = inFeature.GetField('DN')
        outFeature.SetField('DN', DN)
        outLayer.CreateFeature(outFeature)
        outFeature.Destroy()
        inFeature.Destroy()
        inFeature = inLayer.GetNextFeature()
        currentchunk = currentchunk + 1

    # close the data sources
    inDS.Destroy()
    if outDS is not None:
        outDS.Destroy()  # flush out the last changes here

    print("")
    print("Produced " + str(totalsubsets) + " temporary shapefiles")
    print("")
def consolidate(inputfile):
    """Merge all per-polygon temp shapefiles into ``<dir_base_name>-traced.shp``.

    Each ``*-polygon*.shp`` under the global ``path`` is clipped out of
    *inputfile* with gdalwarp, classified by its average color against the
    global ``basecolors`` palette (entry 0 is treated as paper and skipped),
    scanned for dot/cross marks with ``cv_feature_detect``, and written to the
    output layer with Color/DotCount/DotType/CrossCount/CrossData fields.

    Reads the globals ``dir_base_name``, ``base_name``, ``path`` and
    ``basecolors``. FIX: Python-2-only print statements converted to
    ``print()`` calls; local ``tempfile`` renamed so it no longer shadows the
    stdlib module; dead commented-out code removed.
    """
    # final output file
    fn = dir_base_name + '-traced.shp'
    driver = ogr.GetDriverByName('ESRI Shapefile')

    # open the coarse polygonized shapefile to borrow its 'DN' field definition
    shapefile = dir_base_name + '.shp'
    inDS = driver.Open(shapefile, 0)
    if inDS is None:
        print('Could not open shapefile')
        sys.exit(1)
    inLayer = inDS.GetLayer()
    feature = inLayer.GetFeature(0)
    idFieldDefn = feature.GetFieldDefnRef('DN')

    if os.path.exists(fn):
        driver.DeleteDataSource(fn)
    outDS = driver.CreateDataSource(fn)
    if outDS is None:
        print('Could not create final shapefile')
        sys.exit(1)
    outLayer = outDS.CreateLayer(base_name, geom_type=ogr.wkbPolygon)
    outLayer.CreateField(idFieldDefn)
    featureDefn = outLayer.GetLayerDefn()

    # additional classification fields on the output layer
    colorDefn = ogr.FieldDefn("Color", ogr.OFTInteger)
    colorDefn.SetWidth(2)
    colorDefn.SetPrecision(0)
    outLayer.CreateField(colorDefn)

    dotCountDefn = ogr.FieldDefn("DotCount", ogr.OFTInteger)
    dotCountDefn.SetWidth(2)
    dotCountDefn.SetPrecision(0)
    outLayer.CreateField(dotCountDefn)

    dotTypeDefn = ogr.FieldDefn("DotType", ogr.OFTInteger)
    dotTypeDefn.SetWidth(1)
    dotTypeDefn.SetPrecision(0)
    outLayer.CreateField(dotTypeDefn)

    crossCountDefn = ogr.FieldDefn("CrossCount", ogr.OFTInteger)
    crossCountDefn.SetWidth(2)
    crossCountDefn.SetPrecision(0)
    outLayer.CreateField(crossCountDefn)

    crossDataDefn = ogr.FieldDefn("CrossData", ogr.OFTString)
    crossDataDefn.SetWidth(255)
    outLayer.CreateField(crossDataDefn)

    polygonfiles = []
    for files in os.listdir(path):
        if files.endswith(".shp") and files.find('-polygon') != -1:
            polygonfile = path + "/" + files
            polygonfilename = files[:files.find(".shp")]
            # apply a projection so gdalwarp doesnt complain
            os.system("cp " + dir_base_name + ".prj " + path + "/" +
                      polygonfilename + ".prj")
            extractedfile = path + "/" + polygonfilename + "-extracted.tif"
            # extract bitmap from original
            command = ("gdalwarp -q -t_srs EPSG:3785 -cutline " + polygonfile +
                       " -crop_to_cutline -of GTiff " + inputfile + " " +
                       extractedfile)
            logging.debug(command)
            os.system(command)
            # calculate color: nearest palette entry by squared RGB distance
            values = average_color(os.path.abspath(extractedfile))
            if len(values) > 0:
                red = int(values[0])
                green = int(values[1])
                blue = int(values[2])
                nearest = 100000
                nearestcolor = []
                nearestcolorindex = -1
                for i, color in enumerate(basecolors):
                    dred = (color[0] - red) * (color[0] - red)
                    dgreen = (color[1] - green) * (color[1] - green)
                    dblue = (color[2] - blue) * (color[2] - blue)
                    dist = dred + dgreen + dblue
                    if dist < nearest:
                        nearest = dist
                        nearestcolor = color
                        nearestcolorindex = i
                # only add if NOT paper (palette entry 0)
                if nearestcolor != basecolors[0]:
                    # check for dots
                    circle_data = cv_feature_detect(extractedfile)
                    polygonfiles.append(
                        [polygonfile, nearestcolorindex, circle_data])
                else:
                    logging.debug("Ignored (paper color): " + polygonfilename +
                                  "\n")
            else:
                logging.debug("Ignored (regex match error): " +
                              polygonfilename + "\n")

    for files in polygonfiles:
        # open each classified temp shapefile and append its features
        temp_shapefile = files[0]
        inDS = driver.Open(temp_shapefile, 0)
        if inDS is None:
            print('Could not open temporary shapefile')
            break
        inLayer = inDS.GetLayer()
        inFeature = inLayer.GetNextFeature()
        while inFeature:
            outFeature = ogr.Feature(featureDefn)
            geom = inFeature.GetGeometryRef()
            outFeature.SetGeometry(geom)
            DN = inFeature.GetField('DN')
            outFeature.SetField('DN', DN)
            outFeature.SetField('Color', int(files[1]))
            outFeature.SetField('DotCount', int(files[2]["count"]))
            outFeature.SetField('DotType', int(files[2]["is_outline"]))
            outFeature.SetField('CrossCount', int(files[2]["cross_count"]))
            outFeature.SetField('CrossData', str(files[2]["cross_data"]))
            outLayer.CreateFeature(outFeature)
            outFeature.Destroy()
            inFeature.Destroy()
            inFeature = inLayer.GetNextFeature()
        inDS.Destroy()
    outDS.Destroy()  # flush out the last changes here

    print("")
    print("Applying projection file to result...")
    print("-------------------------------------")
    os.system("cp " + dir_base_name + ".prj " + dir_base_name + "-traced.prj")
def main(argv):
    """Split a farm GeoJSON into per-field JSON files and EPSG:27700 shapefiles.

    ``argv[1]`` is the farm name. Reads
    ``../originalData/Farm/<name>/<name>.json`` and writes one folder per
    field plus an updated ``FieldTable.json`` under ``../Farm/<name>/``.
    Requires the project helpers ``createJSON`` and ``CRS_4326To27700`` and
    the ``ogr2ogr`` CLI tool on PATH.

    FIX: removed redundant ``.close()`` calls inside ``with`` blocks and the
    unused locals ``fielname``, ``outDriver`` and ``outputJSONPath``; early
    return replaces the whole-body ``else``.
    """
    farmname = argv[1]
    farm = "../originalData/Farm/" + farmname + "/" + farmname + ".json"
    output = "../Farm/" + farmname + "/"
    if not os.path.exists(farm):
        print("Wrong address of FARM JSON")
        return

    with open(farm) as json_file:
        locationX = []
        locationY = []
        FieldTable = {}
        # load an existing field table, or create an empty one on disk
        if os.path.exists(output + "FieldTable.json"):
            with open(output + "FieldTable.json", "r") as FieldTableJson:
                FieldTable = json.load(FieldTableJson)
        else:
            with open(output + "FieldTable.json", "w") as FieldTableJson:
                json.dump(FieldTable, FieldTableJson, indent=4)
        # these sections are rebuilt from scratch on every run
        FieldTable["Field"] = {}
        FieldTable["FieldID"] = {}
        FieldTable["bboxs"] = {}

        data = json.load(json_file)
        PerfectField = data["PerfectField"]
        Field_properties = PerfectField["properties"]
        Farm_name = Field_properties["name"]

        for p in data['features']:
            fieldID = p['id']
            fieldRealID = p["properties"]["Field_id"]
            fieldRealName = p["properties"]["FieldName"]
            FieldTable["Field"][fieldID] = fieldRealName
            FieldTable["FieldID"][fieldID] = fieldRealID
            FieldTable["bboxs"][fieldID] = []
            print('ID: ' + str(p['id']))

            # collect the outer-ring coordinates and track per-field extents
            coordinates = p["geometry"]["coordinates"]
            Field_name = "Field" + str(fieldID)
            Coordinates = [[]]
            fieldLocationX = []
            fieldLocationY = []
            for coordinate in coordinates[0]:
                locationX.append(coordinate[0])
                locationY.append(coordinate[1])
                Coordinates[0].append([coordinate[0], coordinate[1]])
                fieldLocationX.append(coordinate[0])
                fieldLocationY.append(coordinate[1])
            FieldTable["bboxs"][fieldID] = [
                min(fieldLocationX),
                min(fieldLocationY),
                max(fieldLocationX),
                max(fieldLocationY)
            ]

            FieldJSON = createJSON(Field_name, 4326, Farm_name, Coordinates)
            outputFieldPath = output + "Field" + str(fieldID)
            if not os.path.exists(outputFieldPath):
                os.mkdir(outputFieldPath)
            if not os.path.exists(os.path.join(outputFieldPath, "shp")):
                os.mkdir(os.path.join(outputFieldPath, "shp"))
            filePath = outputFieldPath + "/shp" + "/" + Field_name + ".json"
            if not os.path.exists(filePath):
                with open(filePath, 'w') as outfile:
                    json.dump(FieldJSON, outfile)
                print()
            # convert the field JSON to a reprojected shapefile once
            if not os.path.exists(filePath[:-4] + "shp"):
                print(filePath[:-4] + "shp")
                command = ("ogr2ogr -f \"ESRI Shapefile\" -s_srs EPSG:4326 "
                           "-t_srs EPSG:27700 " + filePath[:-4] + "shp " +
                           filePath[:-4] + "json")
                print(command)
                subprocess.call(command, shell=True)
            FieldJSON = None
            Coordinates = None

        # persist the table with the farm-wide bounding box
        with open(output + "FieldTable.json", "w") as FieldTableJson:
            FieldTable["bbox"] = [
                min(locationX),
                min(locationY),
                max(locationX),
                max(locationY)
            ]
            json.dump(FieldTable, FieldTableJson, indent=4)
        print(CRS_4326To27700([min(locationY), min(locationX)]))
        print(CRS_4326To27700([max(locationY), max(locationX)]))
# Reproject the vector layer into an in-memory layer using the raster's SRS.
# NOTE(review): this top-level section expects `ras` (an open GDAL dataset),
# `shp_path` and `SpatialReferenceFromRaster` to be defined earlier in the
# file — confirm against the surrounding script.
gt = ras.GetGeoTransform()
pr = ras.GetProjection()
sr_ras = SpatialReferenceFromRaster(ras)

shp = ogr.Open(shp_path)
lyr = shp.GetLayer()
sr_lyr = lyr.GetSpatialRef()

# input data come in different projections.
# Therefore, transformations in both ways are defined
coord_trans = osr.CoordinateTransformation(sr_ras, sr_lyr)      # for the filter process
coord_trans_out = osr.CoordinateTransformation(sr_lyr, sr_ras)  # for the output

drv_mem = ogr.GetDriverByName('Memory')
mem_ds = drv_mem.CreateDataSource("")
repr_lyr = mem_ds.CreateLayer('', sr_ras, ogr.wkbPolygon)
repr_lyr.CreateFields(lyr.schema)

# One reusable feature object; geometry/fields are overwritten per iteration.
# NOTE(review): it is built from lyr's defn rather than repr_lyr's — the field
# schemas match (CreateFields above), but verify this is intentional.
out_feat = ogr.Feature(lyr.GetLayerDefn())
for feat in lyr:
    geom = feat.geometry().Clone()
    geom.TransformTo(sr_ras)
    out_feat.SetGeometry(geom)
    for i in range(feat.GetFieldCount()):
        out_feat.SetField(i, feat.GetField(i))
    repr_lyr.CreateFeature(out_feat)
lyr.ResetReading()  # BUG FIX: was `lyr.ResetReading` — method never called
out_feat = None
repr_lyr_cs = repr_lyr.GetSpatialRef()
def statisticsVectorRaster(image_input, vector_input, vector_output, band_number, enable_stats_all_count, enable_stats_columns_str, enable_stats_columns_real, col_to_delete_list, col_to_add_list, class_label_dico, path_time_log, clean_small_polygons=False, format_vector='ESRI Shapefile', save_results_intermediate=False, overwrite=True): # INITIALISATION if debug >= 3: print(cyan + "statisticsVectorRaster() : " + endC + "image_input : " + str(image_input) + endC) print(cyan + "statisticsVectorRaster() : " + endC + "vector_input : " + str(vector_input) + endC) print(cyan + "statisticsVectorRaster() : " + endC + "vector_output : " + str(vector_output) + endC) print(cyan + "statisticsVectorRaster() : " + endC + "band_number : " + str(band_number) + endC) print(cyan + "statisticsVectorRaster() : " + endC + "enable_stats_all_count : " + str(enable_stats_all_count) + endC) print(cyan + "statisticsVectorRaster() : " + endC + "enable_stats_columns_str : " + str(enable_stats_columns_str) + endC) print(cyan + "statisticsVectorRaster() : " + endC + "enable_stats_columns_real : " + str(enable_stats_columns_real) + endC) print(cyan + "statisticsVectorRaster() : " + endC + "col_to_delete_list : " + str(col_to_delete_list) + endC) print(cyan + "statisticsVectorRaster() : " + endC + "col_to_add_list : " + str(col_to_add_list) + endC) print(cyan + "statisticsVectorRaster() : " + endC + "class_label_dico : " + str(class_label_dico) + endC) print(cyan + "statisticsVectorRaster() : " + endC + "clean_small_polygons : " + str(clean_small_polygons) + endC) print(cyan + "statisticsVectorRaster() : " + endC + "path_time_log : " + str(path_time_log) + endC) print(cyan + "statisticsVectorRaster() : " + endC + "format_vector : " + str(format_vector) + endC) print(cyan + "statisticsVectorRaster() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC) print(cyan + "statisticsVectorRaster() : " + endC + "overwrite : " + str(overwrite) + endC) # Constantes 
PREFIX_AREA_COLUMN = "S_" # Mise à jour du Log starting_event = "statisticsVectorRaster() : Compute statistic crossing starting : " timeLine(path_time_log, starting_event) # creation du fichier vecteur de sortie if vector_output == "": vector_output = vector_input # Précisé uniquement pour l'affichage else: # Copy vector_output copyVectorFile(vector_input, vector_output, format_vector) # Vérifications image_xmin, image_xmax, image_ymin, image_ymax = getEmpriseImage( image_input) vector_xmin, vector_xmax, vector_ymin, vector_ymax = getEmpriseFile( vector_output, format_vector) extension_vector = os.path.splitext(vector_output)[1] if round(vector_xmin, 4) < round(image_xmin, 4) or round( vector_xmax, 4) > round(image_xmax, 4) or round( vector_ymin, 4) < round(image_ymin, 4) or round( vector_ymax, 4) > round(image_ymax, 4): print(cyan + "statisticsVectorRaster() : " + bold + red + "image_xmin, image_xmax, image_ymin, image_ymax" + endC, image_xmin, image_xmax, image_ymin, image_ymax, file=sys.stderr) print(cyan + "statisticsVectorRaster() : " + bold + red + "vector_xmin, vector_xmax, vector_ymin, vector_ymax" + endC, vector_xmin, vector_xmax, vector_ymin, vector_ymax, file=sys.stderr) raise NameError( cyan + "statisticsVectorRaster() : " + bold + red + "The extend of the vector file (%s) is greater than the image file (%s)" % (vector_output, image_input) + endC) pixel_size = getPixelSizeImage(image_input) # Suppression des très petits polygones qui introduisent des valeurs NaN if clean_small_polygons: min_size_area = pixel_size * 2 vector_temp = os.path.splitext( vector_output)[0] + "_temp" + extension_vector cleanMiniAreaPolygons(vector_output, vector_temp, min_size_area, '', format_vector) removeVectorFile(vector_output, format_vector) renameVectorFile(vector_temp, vector_output) # Récuperation du driver pour le format shape driver = ogr.GetDriverByName(format_vector) # Ouverture du fichier shape en lecture-écriture data_source = driver.Open(vector_output, 1) # 0 
means read-only - 1 means writeable. if data_source is None: print(cyan + "statisticsVectorRaster() : " + bold + red + "Impossible d'ouvrir le fichier shape : " + vector_output + endC, file=sys.stderr) sys.exit(1) # exit with an error code # Récupération du vecteur layer = data_source.GetLayer( 0) # Recuperation de la couche (une couche contient les polygones) layer_definition = layer.GetLayerDefn( ) # GetLayerDefn => returns the field names of the user defined (created) fields # ETAPE 1/4 : CREATION AUTOMATIQUE DU DICO DE VALEUR SI IL N'EXISTE PAS if enable_stats_all_count and class_label_dico == {}: image_values_list = identifyPixelValues(image_input) # Pour toutes les valeurs for id_value in image_values_list: class_label_dico[id_value] = str(id_value) # Suppression de la valeur no date à 0 if 0 in class_label_dico: del class_label_dico[0] if debug >= 2: print(class_label_dico) # ETAPE 2/4 : CREATION DES COLONNES DANS LE FICHIER SHAPE if debug >= 2: print( cyan + "statisticsVectorRaster() : " + bold + green + "ETAPE 1/3 : DEBUT DE LA CREATION DES COLONNES DANS LE FICHIER VECTEUR %s" % (vector_output) + endC) # En entrée : # col_to_add_list = [UniqueID, majority/DateMaj/SrcMaj, minority, min, max, mean, median, sum, std, unique, range, all, count, all_S, count_S] - all traduisant le class_label_dico en autant de colonnes # Sous_listes de col_to_add_list à identifier pour des facilités de manipulations ultérieures: # col_to_add_inter01_list = [majority/DateMaj/SrcMaj, minority, min, max, mean, median, sum, std, unique, range] # col_to_add_inter02_list = [majority, minority, min, max, mean, median, sum, std, unique, range, all, count, all_S, count_S] # Construction des listes intermédiaires col_to_add_inter01_list = [] # Valeurs à injecter dans des colonnes - Format String if enable_stats_columns_str: stats_columns_str_list = ['majority', 'minority'] for e in stats_columns_str_list: col_to_add_list.append(e) # Valeurs à injecter dans des colonnes - Format Nbr if 
enable_stats_columns_real: stats_columns_real_list = [ 'min', 'max', 'mean', 'median', 'sum', 'std', 'unique', 'range' ] for e in stats_columns_real_list: col_to_add_list.append(e) # Valeurs à injecter dans des colonnes - Format Nbr if enable_stats_all_count: stats_all_count_list = ['all', 'count'] for e in stats_all_count_list: col_to_add_list.append(e) # Valeurs à injecter dans des colonnes - si class_label_dico est non vide if class_label_dico != {}: stats_all_count_list = ['all', 'count'] for e in stats_all_count_list: if not e in col_to_add_list: col_to_add_list.append(e) # Ajout colonne par colonne if "majority" in col_to_add_list: col_to_add_inter01_list.append("majority") if "DateMaj" in col_to_add_list: col_to_add_inter01_list.append("DateMaj") if "SrcMaj" in col_to_add_list: col_to_add_inter01_list.append("SrcMaj") if "minority" in col_to_add_list: col_to_add_inter01_list.append("minority") if "min" in col_to_add_list: col_to_add_inter01_list.append("min") if "max" in col_to_add_list: col_to_add_inter01_list.append("max") if "mean" in col_to_add_list: col_to_add_inter01_list.append("mean") if "median" in col_to_add_list: col_to_add_inter01_list.append("median") if "sum" in col_to_add_list: col_to_add_inter01_list.append("sum") if "std" in col_to_add_list: col_to_add_inter01_list.append("std") if "unique" in col_to_add_list: col_to_add_inter01_list.append("unique") if "range" in col_to_add_list: col_to_add_inter01_list.append("range") # Copy de col_to_add_inter01_list dans col_to_add_inter02_list col_to_add_inter02_list = list(col_to_add_inter01_list) if "all" in col_to_add_list: col_to_add_inter02_list.append("all") if "count" in col_to_add_list: col_to_add_inter02_list.append("count") if "all_S" in col_to_add_list: col_to_add_inter02_list.append("all_S") if "count_S" in col_to_add_list: col_to_add_inter02_list.append("count_S") if "DateMaj" in col_to_add_inter02_list: col_to_add_inter02_list.remove("DateMaj") col_to_add_inter02_list.insert(0, "majority") 
if "SrcMaj" in col_to_add_inter02_list: col_to_add_inter02_list.remove("SrcMaj") col_to_add_inter02_list.insert(0, "majority") # Valeurs à injecter dans des colonnes - Format Nbr if enable_stats_all_count: stats_all_count_list = ['all_S', 'count_S'] for e in stats_all_count_list: col_to_add_list.append(e) # Creation de la colonne de l'identifiant unique if ("UniqueID" in col_to_add_list) or ("uniqueID" in col_to_add_list) or ( "ID" in col_to_add_list): field_defn = ogr.FieldDefn( "ID", ogr.OFTInteger ) # Création du nom du champ dans l'objet stat_classif_field_defn layer.CreateField(field_defn) if debug >= 3: print(cyan + "statisticsVectorRaster() : " + endC + "Creation de la colonne : ID") # Creation des colonnes de col_to_add_inter01_list ([majority/DateMaj/SrcMaj, minority, min, max, mean, median, sum, std, unique, range]) for col in col_to_add_list: if layer_definition.GetFieldIndex( col ) == -1: # Vérification de l'existence de la colonne col (retour = -1 : elle n'existe pas) if col == 'majority' or col == 'DateMaj' or col == 'SrcMaj' or col == 'minority': # Identification de toutes les colonnes remplies en string stat_classif_field_defn = ogr.FieldDefn( col, ogr.OFTString ) # Création du champ (string) dans l'objet stat_classif_field_defn layer.CreateField(stat_classif_field_defn) elif col == 'mean' or col == 'median' or col == 'sum' or col == 'std' or col == 'unique' or col == 'range' or col == 'max' or col == 'min': stat_classif_field_defn = ogr.FieldDefn( col, ogr.OFTReal ) # Création du champ (real) dans l'objet stat_classif_field_defn # Définition de la largeur du champ stat_classif_field_defn.SetWidth(20) # Définition de la précision du champ valeur flottante stat_classif_field_defn.SetPrecision(2) layer.CreateField(stat_classif_field_defn) if debug >= 3: print(cyan + "statisticsVectorRaster() : " + endC + "Creation de la colonne : " + str(col)) # Creation des colonnes reliées au dictionnaire if ('all' in col_to_add_list) or ('count' in col_to_add_list) 
or ( 'all_S' in col_to_add_list) or ('count_S' in col_to_add_list): for col in class_label_dico: # Gestion du nom de la colonne correspondant à la classe name_col = class_label_dico[col] if len(name_col) > 10: name_col = name_col[:10] print( cyan + "statisticsVectorRaster() : " + bold + yellow + "Nom de la colonne trop long. Il sera tronque a 10 caracteres en cas d'utilisation: " + endC + name_col) # Gestion du nom de la colonne correspondant à la surface de la classe name_col_area = PREFIX_AREA_COLUMN + name_col if len(name_col_area) > 10: name_col_area = name_col_area[:10] if debug >= 3: print( cyan + "statisticsVectorRaster() : " + bold + yellow + "Nom de la colonne trop long. Il sera tronque a 10 caracteres en cas d'utilisation: " + endC + name_col_area) # Ajout des colonnes de % de répartition des éléments du raster if ('all' in col_to_add_list) or ('count' in col_to_add_list): if layer_definition.GetFieldIndex( name_col ) == -1: # Vérification de l'existence de la colonne name_col (retour = -1 : elle n'existe pas) stat_classif_field_defn = ogr.FieldDefn( name_col, ogr.OFTReal ) # Création du champ (real) dans l'objet stat_classif_field_defn # Définition de la largeur du champ stat_classif_field_defn.SetWidth(20) # Définition de la précision du champ valeur flottante stat_classif_field_defn.SetPrecision(2) if debug >= 3: print(cyan + "statisticsVectorRaster() : " + endC + "Creation de la colonne : " + str(name_col)) layer.CreateField( stat_classif_field_defn) # Ajout du champ # Ajout des colonnes de surface des éléments du raster if ('all_S' in col_to_add_list) or ('count_S' in col_to_add_list): if layer_definition.GetFieldIndex( name_col_area ) == -1: # Vérification de l'existence de la colonne name_col_area (retour = -1 : elle n'existe pas) stat_classif_field_defn = ogr.FieldDefn( name_col_area, ogr.OFTReal ) # Création du nom du champ dans l'objet stat_classif_field_defn # Définition de la largeur du champ stat_classif_field_defn.SetWidth(20) # Définition 
de la précision du champ valeur flottante stat_classif_field_defn.SetPrecision(2) if debug >= 3: print(cyan + "statisticsVectorRaster() : " + endC + "Creation de la colonne : " + str(name_col_area)) layer.CreateField( stat_classif_field_defn) # Ajout du champ if debug >= 2: print( cyan + "statisticsVectorRaster() : " + bold + green + "ETAPE 1/3 : FIN DE LA CREATION DES COLONNES DANS LE FICHIER VECTEUR %s" % (vector_output) + endC) # ETAPE 3/4 : REMPLISSAGE DES COLONNES DU VECTEUR if debug >= 2: print(cyan + "statisticsVectorRaster() : " + bold + green + "ETAPE 2/3 : DEBUT DU REMPLISSAGE DES COLONNES DU VECTEUR " + endC) # Calcul des statistiques col_to_add_inter02_list = [majority, minority, min, max, mean, median, sum, std, unique, range, all, count, all_S, count_S] de croisement images_raster / vecteur # Utilisation de la librairie rasterstat if debug >= 3: print(cyan + "statisticsVectorRaster() : " + bold + green + "Calcul des statistiques " + endC + "Stats : %s - Vecteur : %s - Raster : %s" % (col_to_add_inter02_list, vector_output, image_input) + endC) stats_info_list = raster_stats(vector_output, image_input, band_num=band_number, stats=col_to_add_inter02_list) # Decompte du nombre de polygones num_features = layer.GetFeatureCount() if debug >= 3: print(cyan + "statisticsVectorRaster() : " + bold + green + "Remplissage des colonnes polygone par polygone " + endC) if debug >= 3: print(cyan + "statisticsVectorRaster() : " + endC + "Nombre total de polygones : " + str(num_features)) polygone_count = 0 for polygone_stats in stats_info_list: # Pour chaque polygone représenté dans stats_info_list - et il y a autant de polygone que dans le fichier vecteur # Extraction de feature feature = layer.GetFeature(polygone_stats['__fid__']) polygone_count = polygone_count + 1 if debug >= 3 and polygone_count % 10000 == 0: print(cyan + "statisticsVectorRaster() : " + endC + "Avancement : %s polygones traites sur %s" % (polygone_count, num_features)) if debug >= 5: print( cyan 
+ "statisticsVectorRaster() : " + endC + "Traitement du polygone : ", stats_info_list.index(polygone_stats) + 1) # Remplissage de l'identifiant unique if ("UniqueID" in col_to_add_list) or ( "uniqueID" in col_to_add_list) or ("ID" in col_to_add_list): feature.SetField('ID', int(stats_info_list.index(polygone_stats))) # Initialisation à 0 des colonnes contenant le % de répartition de la classe - Verifier ce qu'il se passe si le nom dépasse 10 caracteres if ('all' in col_to_add_list) or ('count' in col_to_add_list): for element in class_label_dico: name_col = class_label_dico[element] if len(name_col) > 10: name_col = name_col[:10] feature.SetField(name_col, 0) # Initialisation à 0 des colonnes contenant la surface correspondant à la classe - Verifier ce qu'il se passe si le nom dépasse 10 caracteres if ('all_S' in col_to_add_list) or ('count_S' in col_to_add_list): for element in class_label_dico: name_col = class_label_dico[element] name_col_area = PREFIX_AREA_COLUMN + name_col if len(name_col_area) > 10: name_col_area = name_col_area[:10] feature.SetField(name_col_area, 0) # Remplissage des colonnes contenant le % de répartition et la surface des classes if ('all' in col_to_add_list) or ('count' in col_to_add_list) or ( 'all_S' in col_to_add_list) or ('count_S' in col_to_add_list): # 'all' est une liste des couples : (Valeur_du_pixel_sur_le_raster, Nbr_pixel_ayant_cette_valeur) pour le polygone observe. # Ex : [(0,183),(803,45),(801,4)] : dans le polygone, il y a 183 pixels de valeur 0, 45 pixels de valeur 803 et 4 pixels de valeur 801 majority_all = polygone_stats['all'] # Deux valeurs de pixel peuvent faire référence à une même colonne. 
Par exemple : les pixels à 201, 202, 203 peuvent correspondre à la BD Topo # Regroupement des éléments de majority_all allant dans la même colonne au regard de class_label_dico count_for_idx_couple = 0 # Comptage du nombre de modifications (suppression de couple) de majority_all pour adapter la valeur de l'index lors de son parcours for idx_couple in range( 1, len(majority_all) ): # Inutile d'appliquer le traitement au premier élément (idx_couple == 0) idx_couple = idx_couple - count_for_idx_couple # Prise en compte dans le parcours de majority_all des couples supprimés couple = majority_all[idx_couple] # Ex : couple = (803,45) if (couple is None) or ( couple == "" ): # en cas de bug de rasterstats (erreur geometrique du polygone par exemple) if debug >= 3: print( cyan + "statisticsVectorRaster() : " + bold + red + "Probleme detecte dans la gestion du polygone %s" % (polygone_count) + endC, file=sys.stderr) pass else: for idx_verif in range(idx_couple): # Vérification au regard des éléments présents en amont dans majority_all # Cas où le nom correspondant au label a déjà été rencontré dans majority_all # Vérification que les pixels de l'image sont réferncés dans le dico if couple[0] in class_label_dico: if class_label_dico[couple[0]] == class_label_dico[ majority_all[idx_verif][0]]: majority_all[idx_verif] = ( majority_all[idx_verif][0], majority_all[idx_verif][1] + couple[1] ) # Ajout du nombre de pixels correspondant dans le couple précédent majority_all.remove( couple ) # Supression du couple présentant le "doublon" count_for_idx_couple = count_for_idx_couple + 1 # Mise à jour du décompte de modifications break else: raise NameError( cyan + "statisticsVectorRaster() : " + bold + red + "The image file (%s) contain pixel value '%d' not identified into class_label_dico" % (image_input, couple[0]) + endC) # Intégration des valeurs de majority all dans les colonnes for couple_value_count in majority_all: # Parcours de majority_all. 
Ex : couple_value_count = (803,45) if (couple_value_count is None) or ( couple_value_count == "" ): # en cas de bug de rasterstats (erreur geometrique du polygone par exemple) if debug >= 3: print( cyan + "statisticsVectorRaster() : " + bold + red + "Probleme detecte dans la gestion du polygone %s" % (polygone_count) + endC, file=sys.stderr) pass else: nb_pixel_total = polygone_stats[ 'count'] # Nbr de pixels du polygone pixel_value = couple_value_count[0] # Valeur du pixel value_count = couple_value_count[ 1] # Nbr de pixels ayant cette valeur name_col = class_label_dico[ pixel_value] # Transformation de la valeur du pixel en "signification" au regard du dictionnaire. Ex : BD Topo ou 2011 name_col_area = PREFIX_AREA_COLUMN + name_col # Identification du nom de la colonne en surfaces if len(name_col) > 10: name_col = name_col[:10] if len(name_col_area) > 10: name_col_area = name_col_area[:10] value_area = pixel_size * value_count # Calcul de la surface du polygone correspondant à la valeur du pixel if nb_pixel_total != None and nb_pixel_total != 0: percentage = ( float(value_count) / float(nb_pixel_total) ) * 100 # Conversion de la surface en pourcentages, arondi au pourcent else: if debug >= 3: print( cyan + "statisticsVectorRaster() : " + bold + red + "Probleme dans l'identification du nombre de pixels du polygone %s : le pourcentage de %s est mis à 0" % (polygone_count, name_col) + endC, file=sys.stderr) percentage = 0.0 if ('all' in col_to_add_list) or ('count' in col_to_add_list): feature.SetField( name_col, percentage ) # Injection du pourcentage dans la colonne correpondante if ('all_S' in col_to_add_list) or ('count_S' in col_to_add_list): feature.SetField( name_col_area, value_area ) # Injection de la surface dans la colonne correpondante else: pass # Remplissage des colonnes statistiques demandées ( col_to_add_inter01_list = [majority/DateMaj/SrcMaj, minority, min, max, mean, median, sum, std, unique, range] ) for stats in col_to_add_inter01_list: if 
stats == 'DateMaj' or stats == 'SrcMaj': # Cas particulier de 'DateMaj' et 'SrcMaj' : le nom de la colonne est DateMaj ou SrcMaj, mais la statistique utilisée est identifiée par majority name_col = stats # Nom de la colonne. Ex : 'DateMaj' value_statis = polygone_stats[ 'majority'] # Valeur majoritaire. Ex : '203' if value_statis == None: value_statis_class = 'nan' else: value_statis_class = class_label_dico[ value_statis] # Transformation de la valeur au regard du dictionnaire. Ex : '2011' feature.SetField(name_col, value_statis_class) # Ajout dans la colonne elif (stats is None) or (stats == "") or ( polygone_stats[stats] is None) or (polygone_stats[stats]) == "" or ( polygone_stats[stats]) == 'nan': # En cas de bug de rasterstats (erreur geometrique du polygone par exemple) pass else: name_col = stats # Nom de la colonne. Ex : 'majority', 'max' value_statis = polygone_stats[ stats] # Valeur à associer à la colonne, par exemple '2011' if ( name_col == 'majority' or name_col == 'minority' ) and class_label_dico != []: # Cas où la colonne fait référence à une valeur du dictionnaire value_statis_class = class_label_dico[value_statis] else: value_statis_class = value_statis feature.SetField(name_col, value_statis_class) layer.SetFeature(feature) feature.Destroy() if debug >= 2: print(cyan + "statisticsVectorRaster() : " + bold + green + "ETAPE 2/3 : FIN DU REMPLISSAGE DES COLONNES DU VECTEUR %s" % (vector_output) + endC) # ETAPE 4/4 : SUPRESSION DES COLONNES NON SOUHAITEES if col_to_delete_list != []: if debug >= 2: print(cyan + "statisticsVectorRaster() : " + bold + green + "ETAPE 3/3 : DEBUT DES SUPPRESSIONS DES COLONNES %s" % (col_to_delete_list) + endC) for col_to_delete in col_to_delete_list: if layer_definition.GetFieldIndex( col_to_delete ) != -1: # Vérification de l'existence de la colonne col (retour = -1 : elle n'existe pas) layer.DeleteField(layer_definition.GetFieldIndex( col_to_delete)) # Suppression de la colonne if debug >= 3: print(cyan + 
"statisticsVectorRaster() : " + endC + "Suppression de %s" % (col_to_delete) + endC) if debug >= 2: print(cyan + "statisticsVectorRaster() : " + bold + green + "ETAPE 3/3 : FIN DE LA SUPPRESSION DES COLONNES" + endC) else: print(cyan + "statisticsVectorRaster() : " + bold + yellow + "ETAPE 3/3 : AUCUNE SUPPRESSION DE COLONNE DEMANDEE" + endC) # Fermeture du fichier shape layer.SyncToDisk() layer = None data_source.Destroy() # Mise à jour du Log ending_event = "statisticsVectorRaster() : Compute statistic crossing ending : " timeLine(path_time_log, ending_event) return
def split_superpixels_and_reference(
        vector_file: str,
        superpix_column: Optional[str] = "superpix",
        driver_in: Optional[str] = "SQLite",
        working_dir: Optional[str] = None,
        logger: Optional[logging.Logger] = LOGGER) -> tuple:
    """Split a learning database into superpixel and reference samples.

    Reference features carry the value 0 in ``superpix_column``; any other
    value flags a superpixel sample. Reference samples are written back over
    ``vector_file``; superpixel samples go to a sibling "\\*SP.sqlite" file.

    Parameters
    ----------
    vector_file : string
        the input vector file (name expected to end in "learn.sqlite")
    superpix_column: string
        the column name for superpixels in vector_file
    driver_in : string
        the vector_file OGR format
    working_dir : string
        optional directory for intermediate outputs
    logger : logging.Logger
        logger instance

    Return
    ------
    tuple
        (vector_file, path to the superpixel database)
        NOTE: the original docstring claimed None was returned; fixed.
    """
    import ogr
    import shutil
    from iota2.Common.Utils import run

    driver = ogr.GetDriverByName(driver_in)
    data_source = driver.Open(vector_file, 0)
    layer = data_source.GetLayer()
    table_name = layer.GetName()
    # Derive the EPSG code from the first feature's geometry SRS.
    feat = layer.GetNextFeature()
    geom = feat.GetGeometryRef()
    spatial_ref = geom.GetSpatialReference()
    epsg_code = int(spatial_ref.GetAttrValue("AUTHORITY", 1))

    vectors_dir, vector_name = os.path.split(vector_file)
    tmp_dir = working_dir if working_dir else vectors_dir
    superpix_db = os.path.join(
        tmp_dir, vector_name.replace("learn.sqlite", "SP.sqlite"))
    ref_db = os.path.join(tmp_dir,
                          vector_name.replace("learn.sqlite", "REF.sqlite"))

    # BUG FIX: original message named superpix_db as the *source*; the
    # samples are extracted from vector_file.
    logger.info(
        f"Extract superpixel samples from file {vector_file} and save it "
        f"in {superpix_db}")
    sql = f"select * from {table_name} where {superpix_column}!=0"
    cmd = (
        f'ogr2ogr -t_srs EPSG:{epsg_code} -s_srs EPSG:{epsg_code} -nln'
        f' {table_name} -f "{driver_in}" -sql "{sql}" {superpix_db} {vector_file}'
    )
    run(cmd)

    # BUG FIX: original message claimed the output was vector_file; the
    # reference samples are first written to ref_db, then moved below.
    logger.info(
        f"Extract reference samples from file {vector_file} and save it"
        f" in {ref_db}")
    sql = f"select * from {table_name} where {superpix_column}=0"
    cmd = (f'ogr2ogr -t_srs EPSG:{epsg_code} -s_srs EPSG:{epsg_code} '
           f'-nln {table_name} -f "{driver_in}" -sql "{sql}" '
           f'{ref_db} {vector_file}')
    run(cmd)

    # The reference database replaces the original learning database.
    shutil.move(ref_db, vector_file)
    if working_dir:
        shutil.copy(superpix_db, vectors_dir)
        os.remove(superpix_db)
    # TODO : replace og2ogr by geopandas ?
    # conn = sqlite3.connect(vector_file)
    # df = geopd.GeoDataFrame.from_postgis(sql, conn, geom_col="geometry")
    # conn_out = sqlite3.connect(vector_file.replace(".sqlite", "_V3.sqlite"))
    # df.to_sql(table_name, conn_out)
    return vector_file, os.path.join(vectors_dir,
                                     os.path.split(superpix_db)[-1])
def main(outputGridfn, xmin, xmax, ymin, ymax, gridHeight, gridWidth): # convert sys.argv to float xmin = float(xmin) xmax = float(xmax) ymin = float(ymin) ymax = float(ymax) gridWidth = float(gridWidth) gridHeight = float(gridHeight) # get rows rows = ceil((ymax - ymin) / gridHeight) # get columns cols = ceil((xmax - xmin) / gridWidth) # start grid cell envelope ringXleftOrigin = xmin ringXrightOrigin = xmin + gridWidth ringYtopOrigin = ymax ringYbottomOrigin = ymax - gridHeight # create output file outDriver = ogr.GetDriverByName('ESRI Shapefile') if os.path.exists(outputGridfn): os.remove(outputGridfn) outDataSource = outDriver.CreateDataSource(outputGridfn) outLayer = outDataSource.CreateLayer(outputGridfn, geom_type=ogr.wkbPolygon) featureDefn = outLayer.GetLayerDefn() # create grid cells countcols = 0 while countcols < cols: countcols += 1 # reset envelope for rows ringYtop = ringYtopOrigin ringYbottom = ringYbottomOrigin countrows = 0 while countrows < rows: countrows += 1 ring = ogr.Geometry(ogr.wkbLinearRing) ring.AddPoint(ringXleftOrigin, ringYtop) ring.AddPoint(ringXrightOrigin, ringYtop) ring.AddPoint(ringXrightOrigin, ringYbottom) ring.AddPoint(ringXleftOrigin, ringYbottom) ring.AddPoint(ringXleftOrigin, ringYtop) poly = ogr.Geometry(ogr.wkbPolygon) poly.AddGeometry(ring) # add new geom to layer outFeature = ogr.Feature(featureDefn) outFeature.SetGeometry(poly) outLayer.CreateFeature(outFeature) outFeature.Destroy # new envelope for next poly ringYtop = ringYtop - gridHeight ringYbottom = ringYbottom - gridHeight # new envelope for next poly ringXleftOrigin = ringXleftOrigin + gridWidth ringXrightOrigin = ringXrightOrigin + gridWidth # Close DataSources outDataSource.Destroy()
def identifyIntersections(in_shp_pth, out_shp_pth, id_field="ID"):
    """
    VERY SLOW
    Identifies pairwise intersections between polygons of a shapefile and
    writes them (as polygons) to the specified output path.
    :param in_shp_pth: Input shapefile of polygons.
    :param out_shp_pth: Output path to which the intersections are written.
    :param id_field: Name of the unique-ID field used to pair features.
    :return: No object returned, but shapefile will be written to disc.
    """
    import os
    import ogr
    import vector

    in_shp = ogr.Open(in_shp_pth, 0)
    in_lyr = in_shp.GetLayer()
    fname_lst = vector.getFieldNames(in_shp)
    # In-memory copy so we can spatial-filter it while iterating in_lyr.
    copy_shp, copy_lyr = vector.copyLayerToMemory(in_lyr)
    drv_shp = ogr.GetDriverByName('ESRI Shapefile')
    in_sr = in_lyr.GetSpatialRef()
    in_lyr_defn = in_lyr.GetLayerDefn()
    if os.path.exists(out_shp_pth):
        drv_shp.DeleteDataSource(out_shp_pth)
    inters_shp = drv_shp.CreateDataSource(out_shp_pth)
    lyr_name = os.path.splitext(os.path.split(out_shp_pth)[1])[0]
    geom_type = ogr.wkbPolygon
    inters_lyr = inters_shp.CreateLayer(lyr_name, in_sr, geom_type=geom_type)
    # Replicate the input schema, plus a combined-ID field for each pair.
    for i in range(0, in_lyr_defn.GetFieldCount()):
        field_def = in_lyr_defn.GetFieldDefn(i)
        inters_lyr.CreateField(field_def)
    # inters_lyr.CreateField(ogr.FieldDefn('ID', ogr.OFTInteger64))
    inters_lyr.CreateField(ogr.FieldDefn('IDInters', ogr.OFTString))
    inters_lyr_defn = inters_lyr.GetLayerDefn()
    num_fields = inters_lyr_defn.GetFieldCount()
    # Pairs already written, as "minID_maxID" strings (order-independent).
    id_inters_lst = []
    for feat_curr in in_lyr:
        id1 = feat_curr.GetField(id_field)
        # print("FEATURE: {}".format(id1))
        geom_curr = feat_curr.GetGeometryRef()
        copy_lyr.SetSpatialFilter(geom_curr)
        for feat_nb in copy_lyr:
            id2 = feat_nb.GetField(id_field)
            id_inters = '{0}_{1}'.format(min([id1, id2]), max([id1, id2]))
            geom_nb = feat_nb.geometry()
            if id1 != id2:
                # print("Neighbouring features: {}".format(id2))
                if geom_nb.Intersects(geom_curr):
                    intersection = geom_nb.Intersection(geom_curr)
                    if intersection is None:
                        area_inters = 0
                    else:
                        geom_type = intersection.GetGeometryName()
                        # Discard point/line touches; keep areal overlaps only
                        # (i.e. not in ['MULTILINESTRING', 'POINT',
                        # 'LINESTRING', 'MULTIPOINT'])
                        if geom_type not in ['POLYGON', 'MULTIPOLYGON']:
                            intersection = None
                            area_inters = 0
                        else:
                            area_inters = round(intersection.Area(), 1)
                else:
                    intersection = None
                    area_inters = 0
                ## if the id of the intersection is not already in the list
                ## and its area is bigger than 0, add it to the output layer
                if area_inters > 0.0 and id_inters not in id_inters_lst:
                    intersection = intersection.Buffer(0)
                    intersection = intersection.MakeValid()
                    wkt_inters = intersection.ExportToWkt()
                    poly = ogr.CreateGeometryFromWkt(wkt_inters)
                    out_feat = ogr.Feature(inters_lyr_defn)
                    out_feat.SetGeometry(poly)
                    # Copy all attributes from the current feature.
                    # (was fname_lst.index(fname) per field: O(n^2))
                    for ind, fname in enumerate(fname_lst):
                        attr = feat_curr.GetField(fname)
                        out_feat.SetField(ind, attr)
                    out_feat.SetField(len(fname_lst), id_inters)
                    inters_lyr.CreateFeature(out_feat)
                    # BUG FIX: was 'ouf_feat = None' (typo), so the feature
                    # reference was never actually released
                    out_feat = None
                    id_inters_lst.append(id_inters)
            else:
                pass
        copy_lyr.SetSpatialFilter(None)
        copy_lyr.ResetReading()
    in_lyr.ResetReading()
    inters_lyr.ResetReading()
    del copy_shp, copy_lyr
    del inters_shp, inters_lyr
    del in_shp, in_lyr
except ImportError: from osgeo import osr #an example shapefile file shp_file = "../static_files/shapefile/watershed/huc18_250k.shp" #open the shapefile shp_datasource = ogr.Open(shp_file) #create spatial reference srs = osr.SpatialReference() #in this case wgs84 srs.ImportFromEPSG(4326) #set driver to KML to be able to create kml file driver = ogr.GetDriverByName('KML') #kml file to export kml_file = 'line.kml' #kml layer layer_name = 'kml_layer' #create a kml datasource kml_datasource = driver.CreateDataSource(kml_file) #create a kml layer for kml datasource kml_layer = kml_datasource.CreateLayer(layer_name, srs, ogr.wkbLineString) #get shapefile layer number layer_number = shp_datasource.GetLayerCount()
for i in range(len(polygon_coords) - 1): lon_interp = np.linspace(polygon_coords[i][0], polygon_coords[i + 1][0], 100) lat_interp = np.linspace(polygon_coords[i][1], polygon_coords[i + 1][1], 100) list_lon_interp.append(lon_interp) list_lat_interp.append(lat_interp) all_lon_interp = np.concatenate(list_lon_interp) all_lat_interp = np.concatenate(list_lat_interp) return np.array(list(zip(all_lon_interp, all_lat_interp))) ds_shp_in = ogr.GetDriverByName('ESRI Shapefile').Open(fn_shp_in, 0) ds_shp_out = ogr.GetDriverByName('ESRI Shapefile').CreateDataSource(fn_shp_out) srs = osr.SpatialReference() srs.ImportFromEPSG(4326) layer_out = ds_shp_out.CreateLayer('buff', srs=srs, geom_type=ogr.wkbPolygon) layer_in = ds_shp_in.GetLayer() multipoly = ogr.Geometry(ogr.wkbMultiPolygon) for feature in layer_in: geom = feature.GetGeometryRef() print('Working on:' + feature.GetField('RGIId'))
def ogr_gxt_3():
    """Round-trip test: copy data/points.gxt to a new Geoconcept file and
    verify SRS, attributes and geometries of the re-read copy.

    Returns 'success' or 'fail' (old-style gdaltest protocol).
    """
    if gdaltest.gxt_ds is not None:
        gdaltest.gxt_ds.Destroy()
    gdaltest.gxt_ds = None
    src_ds = ogr.Open('data/points.gxt')
    try:
        os.remove('tmp/tmp.gxt')
    # BUG FIX: was a bare 'except:' which also swallows KeyboardInterrupt
    # and SystemExit; only a missing/locked file should be tolerated here
    except OSError:
        pass
    # Duplicate all the points from the source GXT
    src_lyr = src_ds.GetLayerByName('points.points')
    gdaltest.gxt_ds = ogr.GetDriverByName('Geoconcept').CreateDataSource(
        'tmp/tmp.gxt')
    srs = osr.SpatialReference()
    srs.SetWellKnownGeogCS('WGS84')
    gxt_lyr = gdaltest.gxt_ds.CreateLayer('points',
                                          srs,
                                          geom_type=ogr.wkbPoint)
    src_lyr.ResetReading()
    for i in range(src_lyr.GetLayerDefn().GetFieldCount()):
        field_defn = src_lyr.GetLayerDefn().GetFieldDefn(i)
        gxt_lyr.CreateField(field_defn)
    dst_feat = ogr.Feature(feature_def=gxt_lyr.GetLayerDefn())
    feat = src_lyr.GetNextFeature()
    while feat is not None:
        dst_feat.SetFrom(feat)
        if gxt_lyr.CreateFeature(dst_feat) != 0:
            gdaltest.post_reason('CreateFeature failed.')
            return 'fail'
        feat = src_lyr.GetNextFeature()
    dst_feat.Destroy()
    src_ds.Destroy()
    # Close to flush, then reopen for verification.
    gdaltest.gxt_ds.Destroy()
    gdaltest.gxt_ds = None
    # Read the newly written GXT file and check its features and geometries
    gdaltest.gxt_ds = ogr.Open('tmp/tmp.gxt')
    gxt_lyr = gdaltest.gxt_ds.GetLayerByName('points.points')
    if not gxt_lyr.GetSpatialRef().IsSame(srs):
        gdaltest.post_reason('Output SRS is not the one expected.')
        return 'fail'
    expect = ['PID1', 'PID2']
    tr = ogrtest.check_features_against_list(gxt_lyr, 'Primary_ID', expect)
    if not tr:
        return 'fail'
    gxt_lyr.ResetReading()
    expect = ['SID1', 'SID2']
    tr = ogrtest.check_features_against_list(gxt_lyr, 'Secondary_ID', expect)
    if not tr:
        return 'fail'
    gxt_lyr.ResetReading()
    expect = ['TID1', None]
    tr = ogrtest.check_features_against_list(gxt_lyr, 'Third_ID', expect)
    if not tr:
        return 'fail'
    gxt_lyr.ResetReading()
    feat = gxt_lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(
            feat, 'POINT(0 1)', max_error=0.000000001) != 0:
        return 'fail'
    feat = gxt_lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(
            feat, 'POINT(2 3)', max_error=0.000000001) != 0:
        return 'fail'
    return 'success'
import osr import os import shutil # Source and target file names srcName = "NYC_MUSEUMS_LAMBERT.shp" tgtName = "NYC_MUSEUMS_GEO.shp" # Target spatial reference tgt_spatRef = osr.SpatialReference() tgt_spatRef.ImportFromEPSG(4326) # Account for the flipped axis change in the latest GDAL tgt_spatRef.SetAxisMappingStrategy(0) # Source shapefile driver = ogr.GetDriverByName("ESRI Shapefile") src = driver.Open(srcName, 0) srcLyr = src.GetLayer() # Source spatial reference src_spatRef = srcLyr.GetSpatialRef() # Target shapefile - # delete if it's already # there. if os.path.exists(tgtName): driver.DeleteDataSource(tgtName) tgt = driver.CreateDataSource(tgtName) lyrName = os.path.splitext(tgtName)[0] tgtLyr = tgt.CreateLayer(lyrName, geom_type=ogr.wkbPoint)
def point2Shp(df_osm, valueArray, fn, pt_lyrName_w, ref_lyr=False): ds = ogr.Open(fn, 1) # '''参考层,用于空间坐标投影,字段属性等参照''' # ref_lyr=ds.GetLayer(ref_lyr) # ref_sr=ref_lyr.GetSpatialRef() # print(ref_sr) # ref_schema=ref_lyr.schema #查看属性表字段名和类型 # for field in ref_schema: # print(field.name,field.GetTypeName()) '''建立新的datasource数据源''' sf_driver = ogr.GetDriverByName('ESRI Shapefile') sfDS = os.path.join(fn, r'sf') # if os.path.exists(sfDS): # sf_driver.DeleteDataSource(sfDS) pt_ds = sf_driver.CreateDataSource(sfDS) if pt_ds is None: sys.exit('Could not open{0}'.format(sfDS)) '''建立新layer层''' if pt_ds.GetLayer(pt_lyrName_w): pt_ds.DeleteLayer(pt_lyrName_w) spatialRef = osr.SpatialReference() spatialRef.SetWellKnownGeogCS( "WGS84") #需要注意直接定义大地坐标未"WGS84",而未使用参考层提取的坐标投影系统 pt_lyr = pt_ds.CreateLayer(pt_lyrName_w, spatialRef, ogr.wkbPoint) # pt_lyr=pt_ds.CreateLayer(pt_lyrName_w,ref_sr,ogr.wkbPoint) '''配置字段,名称以及类型和相关参数''' # pt_lyr.CreateFields(ref_schema) LatFd = ogr.FieldDefn("origiLat", ogr.OFTReal) LatFd.SetWidth(20) LatFd.SetPrecision(3) pt_lyr.CreateField(LatFd) LatFd.SetName("origiLong") pt_lyr.CreateField(LatFd) # pt_lyr.CreateFields(ref_schema) preFd = ogr.FieldDefn("type", ogr.OFTString) pt_lyr.CreateField(preFd) preFd.SetName("tagkey") pt_lyr.CreateField(preFd) preFd.SetName("tagvalue") pt_lyr.CreateField(preFd) preFd = ogr.FieldDefn("cluster", ogr.OFTInteger) pt_lyr.CreateField(preFd) # preFd.SetName("cluster") # pt_lyr.CreateField(preFd) # stationName=ogr.FieldDefn("stationN",ogr.OFTString) # pt_lyr.CreateField(stationName) # preFd.SetName("ObservTime") # pt_lyr.CreateField(preFd) # '''建立feature空特征和设置geometry几何类型''' print(pt_lyr.GetLayerDefn()) pt_feat = ogr.Feature(pt_lyr.GetLayerDefn()) # idx=0 for i in tqdm(range(valueArray.shape[0])): #循环feature # print(key) '''设置几何体''' #pt_ref=feat.geometry().Clone() # converCoordiGCJ=cc.bd09togcj02(dataBunch.data[i][1],dataBunch.data[i][0]) # converCoordiGPS84=cc.gcj02towgs84(converCoordiGCJ[0],converCoordiGCJ[1]) # 
print(wdCoordiDicSingle[key][1],wdCoordiDicSingle[key][0]) # print(converCoordiGPS84[0], converCoordiGPS84[1]) wkt = "POINT(%f %f)" % (df_osm["lon"][i], df_osm["lat"][i]) # wkt="POINT(%f %f)" % (dataBunch.data[i][0], dataBunch.data[i][1]) newPt = ogr.CreateGeometryFromWkt(wkt) #使用wkt的方法建立点 pt_feat.SetGeometry(newPt) '''设置字段值''' # for i_field in range(feat.GetFieldCount()): # pt_feat.SetField(i_field,feat.GetField(i_field)) pt_feat.SetField("origiLat", df_osm["lat"][i]) pt_feat.SetField("origiLong", df_osm["lon"][i]) # print(wdDicComplete[key]['20140901190000']) pt_feat.SetField("type", df_osm["type"][i]) # pt_feat.SetField("tagkey", df_osm["tagkey"][i]) pt_feat.SetField("tagvalue", df_osm["tagvalue"][i]) pt_feat.SetField("cluster", int(valueArray[i])) # print(idx,int(valueArray[idx]),pt_ref.GetX()) # idx+=1 '''根据设置的几何体和字段值,建立feature。循环建立多个feature特征''' pt_lyr.CreateFeature(pt_feat) del ds
def build_tiles_shp(mosaicname, tiles, params): tiles_shp = mosaicname + "_tiles.shp" if os.path.isfile(tiles_shp): logger.info("Tiles shapefile already exists: %s" %os.path.basename(tiles_shp)) else: logger.info("Creating shapefile of tiles: %s" %os.path.basename(tiles_shp)) fields = [('ROW', ogr.OFTInteger, 4), ('COL', ogr.OFTInteger, 4), ("TILENAME", ogr.OFTString, 100), ('TILEPATH', ogr.OFTString, 254), ('XMIN', ogr.OFTReal, 0), ('XMAX', ogr.OFTReal, 0), ('YMIN', ogr.OFTReal, 0), ('YMAX', ogr.OFTReal, 0)] OGR_DRIVER = "ESRI Shapefile" ogrDriver = ogr.GetDriverByName(OGR_DRIVER) if ogrDriver is None: logger.error("OGR: Driver %s is not available" % OGR_DRIVER) sys.exit(-1) if os.path.isfile(tiles_shp): ogrDriver.DeleteDataSource(tiles_shp) vds = ogrDriver.CreateDataSource(tiles_shp) if vds is None: logger.error("Could not create shp") sys.exit(-1) shpd, shpn = os.path.split(tiles_shp) shpbn, shpe = os.path.splitext(shpn) rp = osr.SpatialReference() rp.ImportFromWkt(params.proj) lyr = vds.CreateLayer(shpbn, rp, ogr.wkbPolygon) if lyr is None: logger.error("ERROR: Failed to create layer: %s" % shpbn) sys.exit(-1) for fld, fdef, flen in fields: field_defn = ogr.FieldDefn(fld, fdef) if fdef == ogr.OFTString: field_defn.SetWidth(flen) if lyr.CreateField(field_defn) != 0: logger.error("ERROR: Failed to create field: %s" % fld) for t in tiles: feat = ogr.Feature(lyr.GetLayerDefn()) feat.SetField("TILENAME",os.path.basename(t.name)) feat.SetField("TILEPATH",t.name) feat.SetField("ROW",t.j) feat.SetField("COL",t.i) feat.SetField("XMIN",t.xmin) feat.SetField("XMAX",t.xmax) feat.SetField("YMIN",t.ymin) feat.SetField("YMAX",t.ymax) feat.SetGeometry(t.geom) if lyr.CreateFeature(feat) != 0: logger.error("ERROR: Could not create feature for tile %s" % tile) feat.Destroy()
def get_skelton(tifname, dso=None, name_use=None, fn_censor=None):
    """Build a polygon outlining the footprint of a raster and append it
    (with an 'id' and 'name' attribute) to an in-memory OGR dataset.

    :param tifname: path of the raster to outline.
    :param dso: existing OGR Memory dataset to append to; a new one is
        created when None.
    :param name_use: value for the 'name' attribute (basename is used);
        defaults to tifname.
    :param fn_censor: callable mapping an (N, 2) coordinate array to a
        corrected array; auto-selected (censor_sinu for sinusoidal
        rasters, identity otherwise) when None.
    :return: the OGR Memory dataset containing the new footprint feature.
    """
    if name_use is None:
        name_use = tifname
    name_use = os.path.basename(name_use)
    ds = gdal.Open(tifname)
    gt = np.array(ds.GetGeoTransform())
    nc = ds.RasterXSize
    nr = ds.RasterYSize
    # get projection
    srs = osr.SpatialReference()
    srs.ImportFromWkt(ds.GetProjection())
    # if its sinusoidal, i know what to do
    if fn_censor is None:
        srs0 = osr.SpatialReference()
        srs0.ImportFromProj4(
            '+proj=sinu +lon_0=0 +x_0=0 +y_0=0 +a=6371007.181 +b=6371007.181 +units=m +no_defs'
        )
        # this IsSame doesnt work...
        if srs.IsSame(srs0):
            # given y coord, i can calculate length of the parallel.
            # if x coord is beyond what's expected, shift to the x bound
            fn_censor = censor_sinu
        else:
            # non-sinusoidal: pass coordinates through unchanged
            # (was 'fn_censor = lambda x: x'; named def per PEP 8 / E731)
            def fn_censor(xy):
                return xy
    # get corners and points along sides
    # points on one side
    num = 50
    xp = np.rint(np.linspace(0, nc, num + 1))
    yp = np.rint(np.linspace(0, nr, num + 1))
    # coords along four sides (pixel space), closed back to the origin
    xy = np.zeros(((num) * 4 + 1, 2))
    xy[(0 * num):(1 * num), 0] = 0
    xy[(0 * num):(1 * num), 1] = yp[:-1]
    xy[(1 * num):(2 * num), 0] = xp[:-1]
    xy[(1 * num):(2 * num), 1] = yp[-1]
    xy[(2 * num):(3 * num), 0] = xp[-1]
    xy[(2 * num):(3 * num), 1] = yp[:0:-1]
    xy[(3 * num):(4 * num), 0] = xp[:0:-1]
    xy[(3 * num):(4 * num), 1] = 0
    xy[(4 * num), :] = 0
    # inver order so that points go clockwise, somwhow it works better in
    # shapely. with original ordering, it failed to fix the shape in
    # siberia ('h22v01'), for example: top right tile which tourches
    # boundary seemed to fail. better way is to not rely on censor method,
    # which shift out of boundary points horizontally to boundary but also
    # check if vertical correction is needed (move to intersection of side
    # of tile and the boundary), but i just let it go for now
    # xy = xy[::-1, :]
    # coords in dataset's coordinate (apply the geotransform)
    xy = np.apply_along_axis(
        lambda p: (gt[0] + (gt[1:3] * p).sum(), gt[3] + (gt[4:6] * p).sum()),
        1, xy)
    # censor points outside of defined area
    xy = fn_censor(xy)
    # remove repeated points (after censoring)
    ok = [0]
    for i in range(1, (xy.shape[0])):
        if not np.array_equal(xy[i - 1, :], xy[i, :]):
            ok.append(i)
    xy = xy[ok, :]
    # make it into polygon; buffer(0) is the usual shapely self-repair
    poly = Polygon(xy)
    if not poly.is_valid:
        poly = poly.buffer(0)
    if not poly.is_valid:
        # BUG FIX: was 'import pdb; pdb.set_trace()' — an interactive
        # debugger left in the production path; fail loudly instead
        raise RuntimeError(
            'could not repair invalid footprint polygon for %s' % tifname)
    assert poly.area > 0
    # ogr memory dataset
    if dso is None:
        # create dataset with an integer 'id' and string 'name' field
        drv = ogr.GetDriverByName('Memory')
        dso = drv.CreateDataSource('poly')
        lyr = dso.CreateLayer('', srs, ogr.wkbPolygon)
        lyr.CreateField(ogr.FieldDefn('id', ogr.OFTInteger))
        fdefn = ogr.FieldDefn('name', ogr.OFTString)
        fdefn.SetWidth(255)
        lyr.CreateField(fdefn)
        defn = lyr.GetLayerDefn()
        idn = 1
    else:
        # continue numbering after the last record's id
        lyr = dso.GetLayer()
        defn = lyr.GetLayerDefn()
        idn = lyr.GetFeatureCount()
        idn = max(idn, lyr.GetFeature(idn - 1).GetField('id')) + 1
    # add the polygon to the dataset
    feat = ogr.Feature(defn)
    feat.SetField('id', idn)
    feat.SetField('name', name_use)
    geom = ogr.CreateGeometryFromWkb(poly.wkb)
    feat.SetGeometry(geom)
    lyr.CreateFeature(feat)
    feat = geom = None
    lyr = None
    return dso
def fault2shp(corner_lons,
              corner_lats,
              output_shp,
              corner_depths=None,
              vertice_array=False):
    """Function for writing a fault geometry to a shapefile.

    Writes the fault plane as a polygon to output_shp, then the upper
    edge (the two shallowest corners) as a line to
    "<output_shp basename>_upper_edge.shp".

    :param corner_lons: longitudes of the fault corners/vertices.
    :param corner_lats: latitudes of the fault corners/vertices.
    :param output_shp: output polygon shapefile path.
    :param corner_depths: depths of the corners; required (used for the
        mean_depth attribute and the upper-edge trace).
    :param vertice_array: if True, corner_lons/corner_lats are 1D arrays
        tracing the polygon boundary in order; otherwise they are the
        four corners in 0,1,3,2 winding order.
    """
    if corner_depths is None:
        # mean_depth and the upper-edge trace both need depths; fail with
        # a clear message instead of a TypeError inside mean()
        raise ValueError("corner_depths must be provided")
    # Create a Polygon from the extent tuple
    ring = ogr.Geometry(ogr.wkbLinearRing)
    if vertice_array:
        # Assume corner_lons, corner_lats are two 1D arrays giving the
        # coordinates of the polygon boundary in order
        for i in range(len(corner_lons)):
            ring.AddPoint(corner_lons[i], corner_lats[i])
        ring.AddPoint(corner_lons[0], corner_lats[0])  # close polygon
    else:
        # Four corners; 0,1,3,2 ordering makes a non-self-intersecting ring
        ring.AddPoint(corner_lons[0], corner_lats[0])
        ring.AddPoint(corner_lons[1], corner_lats[1])
        ring.AddPoint(corner_lons[3], corner_lats[3])
        ring.AddPoint(corner_lons[2], corner_lats[2])
        ring.AddPoint(corner_lons[0], corner_lats[0])  # close polygon
    poly = ogr.Geometry(ogr.wkbPolygon)
    poly.AddGeometry(ring)
    drv = ogr.GetDriverByName('ESRI Shapefile')
    # Remove output shapefile if it already exists
    if os.path.exists(output_shp):
        drv.DeleteDataSource(output_shp)
    # Create the output shapefile
    outDataSource = drv.CreateDataSource(output_shp)
    outLayer = outDataSource.CreateLayer("Fault_geom",
                                         geom_type=ogr.wkbPolygon)
    # Add an ID field
    idField = ogr.FieldDefn("id", ogr.OFTInteger)
    outLayer.CreateField(idField)
    # Add a depth field
    depthField = ogr.FieldDefn("mean_depth", ogr.OFTReal)
    outLayer.CreateField(depthField)
    # Create the feature and set values
    featureDefn = outLayer.GetLayerDefn()
    feature = ogr.Feature(featureDefn)
    feature.SetGeometry(poly)
    feature.SetField("id", 1)
    feature.SetField("mean_depth", mean(corner_depths))
    outLayer.CreateFeature(feature)
    feature = None
    # Save and close
    outDataSource = None
    drv = None
    # Now write upper trace (two shallowest corners) to a line shapefile
    line = ogr.Geometry(ogr.wkbLineString)
    corner_depths = list(corner_depths)
    min_dep = min(corner_depths)
    min_dep_index = corner_depths.index(min(corner_depths))
    # BUG FIX: the four prints below were Python 2 print statements,
    # a syntax error under Python 3
    print(min_dep_index)
    line.AddPoint(corner_lons[min_dep_index], corner_lats[min_dep_index])
    # Mask the shallowest corner so min() finds the second-shallowest
    corner_depths[min_dep_index] = 1e10
    min_dep_2 = min(corner_depths)
    min_dep_index_2 = corner_depths.index(min(corner_depths))
    print(min_dep_index_2)
    line.AddPoint(corner_lons[min_dep_index_2], corner_lats[min_dep_index_2])
    corner_depths[min_dep_index] = min_dep  # restore the masked value
    print(min_dep, min_dep_2)
    mean_upper_depth = mean([min_dep, min_dep_2])
    print(mean_upper_depth)
    drv = ogr.GetDriverByName('ESRI Shapefile')
    # BUG FIX: was output_shp.rstrip('.shp'), which strips any trailing
    # '.', 's', 'h', 'p' characters (e.g. 'caps.shp' -> 'ca'); use
    # splitext to drop the extension safely
    output_shp = os.path.splitext(output_shp)[0] + '_upper_edge.shp'
    # Remove output shapefile if it already exists
    if os.path.exists(output_shp):
        drv.DeleteDataSource(output_shp)
    # Create the output shapefile
    outDataSource = drv.CreateDataSource(output_shp)
    outLayer = outDataSource.CreateLayer("Fault_geom",
                                         geom_type=ogr.wkbLineString)
    # Add a depth field
    depthField = ogr.FieldDefn("mean_depth", ogr.OFTReal)
    outLayer.CreateField(depthField)
    # Create the feature and set values
    featureDefn = outLayer.GetLayerDefn()
    feature = ogr.Feature(featureDefn)
    feature.SetGeometry(line)
    feature.SetField("mean_depth", mean_upper_depth)
    outLayer.CreateFeature(feature)
    feature = None
    # Save and close
    outDataSource = None
    drv = None
def work_polygonize(self, tifnames, dstdir, bname, dryrun=False):
    """Polygonize burned-area rasters into shapefiles.

    Builds a VRT over tifnames, masks non-burned pixels, then writes four
    shapefiles under dstdir: a polygon and a point layer in the rasters'
    native (sinusoidal) SRS, plus lon/lat (WGS84) reprojections of both.

    :param tifnames: list of input GeoTIFF paths (mosaicked via a VRT).
    :param dstdir: output directory (created if missing).
    :param bname: basename used to compose the output shapefile names.
    :param dryrun: unused in this method's body — TODO confirm intent.
    :return: the reprojected polygon data source from
        transform_coordinates().
    """
    # create vrt first, and then generate tiled warped files
    if not os.path.exists(dstdir):
        os.makedirs(dstdir)
    # create vrtual dataset
    vrtname = os.path.join(dstdir, 'src.vrt')
    #cmd = 'gdalbuildvrt %s %s' % ( vrtname, ' '.join(tifnames))
    # anaconda on win had trouble with long command line, ,so rewrote with -input_file_ist
    with open('tifnames.txt', 'w') as f:
        f.write('\n'.join(tifnames) + '\n')
    cmd = 'gdalbuildvrt %s -input_file_list %s' % (vrtname, 'tifnames.txt')
    status = os.system(cmd)
    if status != 0:
        raise RuntimeError('exit status %s, cmd = %s' % (status, cmd))
    # open the unified band
    ds = gdal.Open(vrtname)
    srs0 = ds.GetProjection()
    #print(srs0)
    #print(ds)
    b = ds.GetRasterBand(1)
    # create mask band
    # valid data are 1-366. 0 is unbunred, -1 fill, -2 water
    # mask file should has 1 for valid, 0 for invalid
    drv = gdal.GetDriverByName('MEM')
    dsm = drv.CreateCopy('mem0', ds)
    m = dsm.GetRasterBand(1)
    # TODO
    # this may bomb if raster is large
    # solution, make mask for each tile
    # maybe do it with scratch file, not memory
    # then make vrt
    # Band.CreateMaskBand() may be useful
    try:
        arr = m.ReadAsArray()
        print('good')
    except Exception as e:
        print(e)
        raise
    # zero out fill (-1) and water (-2) so they don't polygonize
    arr[arr < 0] = 0
    m.WriteArray(arr)
    # output names: *.sinu.* in native SRS, plain names for lon/lat copies
    oname0 = os.path.join(dstdir, '.'.join([bname, 'sinu', 'poly', 'shp']))
    oname = os.path.join(dstdir, '.'.join([bname, 'poly', 'shp']))
    #print(oname0)
    #print(oname)
    # output shapefile
    #drv = ogr.GetDriverByName('Memory')
    drv = ogr.GetDriverByName('ESRI Shapefile')
    if os.path.exists(oname0):
        drv.DeleteDataSource(oname0)
    # import pdb
    # pdb.set_trace()
    dst0 = drv.CreateDataSource(oname0)
    srs = osr.SpatialReference()
    srs.ImportFromWkt(srs0)
    #print(dst0)
    #print(ogr)
    lyr0 = dst0.CreateLayer('lyr0', srs, ogr.wkbPolygon)
    #print(lyr0)
    # need point shape file too
    onamep0 = os.path.join(dstdir, '.'.join([bname, 'sinu', 'pnt', 'shp']))
    onamep = os.path.join(dstdir, '.'.join([bname, 'pnt', 'shp']))
    if os.path.exists(onamep0):
        drv.DeleteDataSource(onamep0)
    dstp0 = drv.CreateDataSource(onamep0)
    lyrp0 = dstp0.CreateLayer('lyrp0', srs, ogr.wkbPoint)
    # shared fields; CreateField copies the definition so reuse is safe
    fd = ogr.FieldDefn('BurnYear', ogr.OFTInteger)
    lyr0.CreateField(fd)
    lyrp0.CreateField(fd)
    fd = ogr.FieldDefn('BurnDate', ogr.OFTInteger)
    lyr0.CreateField(fd)
    lyrp0.CreateField(fd)
    fd = ogr.FieldDefn('area_sqkm', ogr.OFTReal)
    lyr0.CreateField(fd)
    fld = 1  #second field

    def pointize(b, m, lyrp0, fld):
        # similar to gdal.Polygonize(): write one point per unmasked pixel
        # (pixel center), carrying the pixel value in field index fld
        a = b.ReadAsArray()
        #jdx,idx = np.nonzero(m)
        indices = np.nonzero(m.ReadAsArray())
        if len(indices) == 1:
            # i am guessing it means empty
            return
        #print(indices)
        jdx, idx = indices
        gt = b.GetDataset().GetGeoTransform()
        # pixel center coordinates via the geotransform
        xcoord = (idx + .5) * gt[1] + gt[0]
        ycoord = (jdx + .5) * gt[5] + gt[3]
        n = xcoord.size
        fldname = lyrp0.GetLayerDefn().GetFieldDefn(fld).GetNameRef()
        print(n)
        #print(lyr0.GetFeatureCount())
        print(lyrp0.GetFeatureCount())
        for i in range(n):
            geom = ogr.Geometry(ogr.wkbPoint)
            geom.AddPoint(xcoord[i], ycoord[i])
            v = a[jdx[i], idx[i]]
            feat = ogr.Feature(lyrp0.GetLayerDefn())
            #print(fldname)
            #print(a[jdx[i],idx[i]].astype(int))
            #print(type(a[jdx[i],idx[i]].astype(int)))
            #print(type(1))
            # TODO there should be more cleaner way
            feat.SetField(fldname, int(a[jdx[i], idx[i]].astype(int)))
            feat.SetGeometry(geom)
            lyrp0.CreateFeature(feat)
            del feat
        #print(lyr0.GetFeatureCount())
        print(lyrp0.GetFeatureCount())

    gdal.Polygonize(b, m, lyr0, fld, [], callback=None)
    pointize(b, m, lyrp0, fld)
    print(lyrp0.GetFeatureCount())
    lyrp0.SyncToDisk()
    # close and reopen. ugly...
    del lyrp0
    del dstp0
    subprocess.run(['ls', '-l', onamep0], check=True)
    #subprocess.run(['ogrinfo', onamep0], check=True)
    dstp0 = drv.Open(onamep0)
    del b, m  # band
    del ds, dsm  # raster
    # annotate every polygon with the burn year and its area in km^2
    lyr0.SetNextByIndex(0)
    for i, feat in enumerate(lyr0):
        feat.SetField('BurnYear', self.year)
        geom = feat.GetGeometryRef()
        feat.SetField('area_sqkm', geom.GetArea() / 1000000)
        lyr0.SetFeature(feat)
    # project
    target_projection = '+proj=longlat +datum=WGS84 +no_defs'
    drv = ogr.GetDriverByName('ESRI Shapefile')
    if os.path.exists(oname):
        drv.DeleteDataSource(oname)
    srs1 = osr.SpatialReference()
    srs1.ImportFromProj4(target_projection)
    dst = transform_coordinates(dst0, srs1, drv, oname=oname)
    del lyr0
    del dst0
    if os.path.exists(onamep):
        drv.DeleteDataSource(onamep)
    # Couldn't figure out why this does work, so use gdalwarp command
    #dstp = transform_coordinates(dstp0, srs1, drv, oname=onamep)
    del dstp0
    subprocess.run(['ls', '-l', onamep0], check=True)
    subprocess.run(['ogrinfo', onamep0], check=True)
    subprocess.run(
        ['ogr2ogr', '-t_srs', target_projection, onamep, onamep0],
        check=True)
    #del lyrp0
    #del dstp0
    #del dstp
    return dst
def ogr_vrt_20():
    """Test a VRT layer backed by a spatially-indexed shapefile.

    Creates tmp/test.shp with four points (one duplicated) plus a spatial
    index, wraps it in an OGR VRT, then checks that the VRT layer reports
    fast feature-count / spatial-filter / extent capabilities and honours
    rectangular spatial filters.

    Returns the gdaltest-style strings 'success', 'fail' or 'skip'.
    """
    if gdaltest.vrt_ds is None:
        return 'skip'

    # Delete any stale shapefile from an earlier run; silence the error
    # DeleteDataSource emits when the file is absent.
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    try:
        ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test.shp')
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt / SystemExit
        # are no longer swallowed.
        pass
    gdal.PopErrorHandler()

    shp_ds = ogr.GetDriverByName('ESRI Shapefile').CreateDataSource(
        'tmp/test.shp')
    shp_lyr = shp_ds.CreateLayer('test')

    # Fixture points — previously four copy-pasted creation stanzas.
    # 'POINT (-10 49)' is intentionally duplicated.
    for wkt in ('POINT (-10 45)', 'POINT (-10 49)',
                'POINT (2 49)', 'POINT (-10 49)'):
        feat = ogr.Feature(shp_lyr.GetLayerDefn())
        feat.SetGeometryDirectly(ogr.CreateGeometryFromWkt(wkt))
        shp_lyr.CreateFeature(feat)
        feat.Destroy()

    shp_ds.ExecuteSQL('CREATE SPATIAL INDEX on test')
    shp_ds.Destroy()

    vrt_xml = """
<OGRVRTDataSource>
    <OGRVRTLayer name="test">
        <SrcDataSource relativeToVRT="0">tmp/test.shp</SrcDataSource>
        <SrcLayer>test</SrcLayer>
    </OGRVRTLayer>
</OGRVRTDataSource>"""
    vrt_ds = ogr.Open(vrt_xml)
    vrt_lyr = vrt_ds.GetLayerByName('test')

    if vrt_lyr.TestCapability(ogr.OLCFastFeatureCount) != 1:
        gdaltest.post_reason('Fast feature count not set.')
        return 'fail'
    if vrt_lyr.TestCapability(ogr.OLCFastSpatialFilter) != 1:
        gdaltest.post_reason('Fast filter not set.')
        return 'fail'
    if vrt_lyr.TestCapability(ogr.OLCFastGetExtent) != 1:
        gdaltest.post_reason('Fast extent not set.')
        return 'fail'

    extent = vrt_lyr.GetExtent()
    if extent != (-10.0, 2.0, 45.0, 49.0):
        gdaltest.post_reason('wrong extent')
        print(extent)
        return 'fail'

    if vrt_lyr.GetFeatureCount() != 4:
        gdaltest.post_reason('Feature count not 4 as expected.')
        return 'fail'

    vrt_lyr.SetSpatialFilterRect(1, 48.5, 3, 49.5)
    if vrt_lyr.GetFeatureCount() != 1:
        if gdal.GetLastErrorMsg().find('GEOS support not enabled') != -1:
            ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource(
                'tmp/test.shp')
            return 'skip'
        print(vrt_lyr.GetFeatureCount())
        gdaltest.post_reason('did not get one feature on rect spatial filter.')
        return 'fail'

    if vrt_lyr.TestCapability(ogr.OLCFastFeatureCount) != 1:
        gdaltest.post_reason('Fast feature count not set.')
        return 'fail'
    if vrt_lyr.TestCapability(ogr.OLCFastGetExtent) != 1:
        gdaltest.post_reason('Fast extent not set.')
        return 'fail'

    extent = vrt_lyr.GetExtent()
    # the shapefile driver currently doesn't change the extent even in the
    # presence of a spatial filter, so that could change in the future
    if extent != (-10.0, 2.0, 45.0, 49.0):
        gdaltest.post_reason('wrong extent')
        print(extent)
        return 'fail'

    vrt_lyr.SetSpatialFilterRect(1, 48, 3, 48.5)
    if vrt_lyr.GetFeatureCount() != 0:
        gdaltest.post_reason('Did not get expected zero feature count.')
        return 'fail'

    vrt_lyr.SetSpatialFilter(None)
    if vrt_lyr.GetFeatureCount() != 4:
        gdaltest.post_reason('Feature count not 4 as expected with no filter.')
        return 'fail'

    vrt_ds.Destroy()
    vrt_ds = None
    ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test.shp')
    return 'success'
# Derive the layer display name from the input path: drop the 4-char
# extension and keep the last backslash-separated component.
displayName = nome[0:len(nome) - 4].split('\\')[-1]
filename = QgsVectorLayer(fileroute, displayName, "ogr")
#QgsMapLayerRegistry.instance().addMapLayer(filename,False)
limite = "C://Users//Desktop//kml-shapefile//" + displayName + ".shp"
iface.addVectorLayer(limite, " " + displayName, "ogr")

# Merge every rod*.shp in the folder into one line shapefile.
outputMergefn = 'C://Users//Desktop//kml-shapefile//rodovias.shp'
directory = "C://Users//Desktop//kml-shapefile//"
fileStartsWith = 'rod'
fileEndsWith = '.shp'
driverName = 'ESRI Shapefile'
geometryType = ogr.wkbLineString

out_driver = ogr.GetDriverByName(driverName)
out_ds = out_driver.CreateDataSource(outputMergefn)
out_layer = out_ds.CreateLayer(outputMergefn, geom_type=geometryType)

# Renamed loop variable: `file` shadowed the builtin.
for fname in os.listdir(directory):
    if not (fname.startswith(fileStartsWith) and fname.endswith(fileEndsWith)):
        continue
    # BUG FIX: the output file ('rodovias.shp') itself matches the
    # rod*.shp pattern and was previously opened as one of its own inputs.
    if fname == os.path.basename(outputMergefn):
        continue
    print(fname)
    ds = ogr.Open(directory + fname)
    lyr = ds.GetLayer()
    for feat in lyr:
        out_feat = ogr.Feature(out_layer.GetLayerDefn())
        out_feat.SetGeometry(feat.GetGeometryRef().Clone())
        out_layer.CreateFeature(out_feat)
        out_feat = None
    ds = None  # close the input datasource

# BUG FIX: release the output datasource so features are flushed to disk;
# previously it was never closed.
out_layer = None
out_ds = None
def create_output_shape(dirname):
    """Yield a freshly created ESRI Shapefile datasource for *dirname*,
    destroying (flushing/closing) it when the generator finishes.

    Intended to be driven like a context manager / fixture: the consumer
    receives the open datasource at the ``yield`` point.
    """
    out_datasource = ogr.GetDriverByName("ESRI Shapefile").CreateDataSource(dirname)
    try:
        yield out_datasource
    finally:
        # BUG FIX: without try/finally, Destroy() was skipped whenever the
        # consumer raised (or the generator was closed) while suspended at
        # the yield, leaving the shapefile unflushed on disk.
        out_datasource.Destroy()
def combine_ss_models(filedict, domains_shp, lt, outfile, nrml_version='04', weight=1.): #, id_base = 'ASS'): """ Combine smoothed seismicity models based on tectonic region types :params filedict: dict of form filedict[trt] = filename specifying input file for that region :params domains_shp: shapefile defining tectonic domain regions :params lt: LogicTree object containing relevant values and weights for Mmax :params outfile: output nrml formatted file """ print 'Getting tectonic region type from %s' % domains_shp driver = ogr.GetDriverByName("ESRI Shapefile") data_source = driver.Open(domains_shp, 0) dsf = data_source.GetLayer() trt_types = [] for feature in dsf: trt_types.append(feature.GetField('TRT')) dsf = shapefile.Reader(domains_shp) dom_shapes = dsf.shapes() hypo_depth_dist_nc = PMF([(0.5, 10.0), (0.25, 5.0), (0.25, 15.0)]) hypo_depth_dist_c = PMF([(0.5, 5.0), (0.25, 2.5), (0.25, 10.0)]) hypo_depth_dist_ex = hypo_depth_dist_c hypo_depth_dict = { 'Cratonic': hypo_depth_dist_c, 'Non_cratonic': hypo_depth_dist_nc, 'Extended': hypo_depth_dist_ex } nodal_plane_dist = PMF([(0.3, NodalPlane(0, 30, 90)), (0.2, NodalPlane(90, 30, 90)), (0.3, NodalPlane(180, 30, 90)), (0.2, NodalPlane(270, 30, 90))]) merged_pts = [] # Get mmax values and weights mmaxs = {} mmaxs_w = {} for trt, filename in filedict.iteritems(): if trt == 'Cratonic': mmax_values, mmax_weights = lt.get_weights('Mmax', 'Proterozoic') else: mmax_values, mmax_weights = lt.get_weights('Mmax', trt) mmax_values = [float(i) for i in mmax_values] mmax_weights = [float(i) for i in mmax_weights] print mmax_values print mmax_weights mmaxs[trt] = mmax_values mmaxs_w[trt] = mmax_weights pt_ids = [] for trt, filename in filedict.iteritems(): print trt print 'Parsing %s' % filename # Only keep points within domain pts = read_pt_source(filename) # shapes = np.where(trt_types for zone_trt, dom_shape in zip(trt_types, dom_shapes): print zone_trt print dom_shape if zone_trt == trt: print 'TRT %s, procesing shape %s' % 
(zone_trt, dom_shape) dom_poly = Polygon(dom_shape.points) for pt in pts: pt_loc = Point(pt.location.x, pt.location.y) if pt_loc.within(dom_poly): pt.tectonic_region_type = zone_trt pt.nodal_plane_distribution = nodal_plane_dist pt.hypocenter_distribution = hypo_depth_dict[zone_trt] pt.rupture_aspect_ratio = 2 mfd = pt.mfd new_mfd = gr2inc_mmax(mfd, mmaxs[trt], mmaxs_w[trt], weight) pt.mfd = new_mfd if pt.source_id in pt_ids: print 'Point source %s already exists!' % pt.source_id print 'Skipping this source for trt %s' % zone_trt else: merged_pts.append(pt) pt_ids.append(pt.source_id) name = outfile.rstrip('.xml') if nrml_version == '04': nodes = list(map(obj_to_node, sorted(merged_pts))) source_model = Node("sourceModel", {"name": name}, nodes=nodes) with open(outfile, 'wb') as f: nrml.write([source_model], f, '%s', xmlns=NAMESPACE)
# NOTE(review): this chunk is cut off mid-loop — the body that uses
# `fieldname` continues beyond this view. Documented in place only; it
# also references root/year/month/day defined outside this chunk.
second = 0
zone = -9  # presumably a timezone offset for the solar calculator — TODO confirm
# https://www.esrl.noaa.gov/gmd/grad/solcalc/azel.html
# watch out the longitude for the west hemisphere is positive and east is negative
latitude = 35.668263
longitude = -139.697001
# create fields for different time stamps
hours = [9, 12, 14, 17]
# the output sunexpo shapefile
# NOTE(review): the filename only encodes hours[0] and hours[1] (9h-12h)
# even though fields are created for all four hours — confirm intent.
shpfile = os.path.join(
    root, f'sunexpo-{year}-{month}-{day}-{hours[0]}h-to-{hours[1]}h.shp')
# create a shapefile to save the sun duration; overwrite any previous output
driver = ogr.GetDriverByName('ESRI Shapefile')
if os.path.exists(shpfile):
    driver.DeleteDataSource(shpfile)
data_source = driver.CreateDataSource(shpfile)
# point layer in geographic WGS84 (EPSG:4326)
targetSpatialRef = osr.SpatialReference()
targetSpatialRef.ImportFromEPSG(4326)
outLayer = data_source.CreateLayer('Sunexpo', targetSpatialRef, ogr.wkbPoint)
panoId = ogr.FieldDefn('panoid', ogr.OFTString)
outLayer.CreateField(panoId)
# one exposure field per sampled hour, e.g. 'expo9', 'expo12', ...
for hour in hours:
    fieldname = 'expo%s' % (hour)
# -*- coding: utf-8 -*- """ Created on Thu May 14 21:28:45 2020 @author: Emile.deBadts """ import pandas import networkx import gdal, ogr import json import numpy as np from qgis.core import QgsMessageLog MEM_DRIVER = ogr.GetDriverByName('MEMORY') MAX_ITERATIONS = 100 def get_angle(p0, p1=np.array([0, 0]), p2=None): if p2 is None: p2 = p1 + np.array([1, 0]) v0 = np.array(p0) - np.array(p1) v1 = np.array(p2) - np.array(p1) angle = np.math.atan2(np.linalg.det([v0, v1]), np.dot(v0, v1)) return np.degrees(angle) def bereken_bobs(task, trace_fn, dem_fn, minimale_dekking, maximale_valhoogte, egalisatie, verhang_tabel, egalisatiehoek):