def CheckDoubleGeomTwofilesCopy(shp1, shp2, field1, field2):
    """Copy into a new shapefile the features of shp1 whose (geometry, code)
    pair also exists in shp2 (priority to file No. 1).

    Parameters
    ----------
    shp1, shp2 : str
        paths of the two input shapefiles
    field1, field2 : str
        attribute field holding the class code in shp1 / shp2

    The matching features are written into the 'commonshape' copy of shp1
    created by vf.copyShp. Returns None.
    """
    ds1 = vf.openToRead(shp1)
    lyr1 = ds1.GetLayer()
    ds2 = vf.openToRead(shp2)
    lyr2 = ds2.GetLayer()
    newshp = vf.copyShp(shp1, "commonshape")

    # (WKT geometry, code) pair of every shp1 feature, keyed by FID.
    pairs1 = {}
    for feat in lyr1:
        pairs1[feat.GetFID()] = (feat.GetGeometryRef().ExportToWkt(),
                                 feat.GetField(field1))

    # Set of (WKT geometry, code) pairs present in shp2. Gives O(1)
    # membership tests instead of the former O(n*m) pairwise scan, and a
    # shp1 feature is now copied only once even when shp2 contains
    # duplicated (geometry, code) pairs (the old double loop copied it
    # once per duplicate).
    pairs2 = set()
    for feat in lyr2:
        pairs2.add((feat.GetGeometryRef().ExportToWkt(),
                    feat.GetField(field2)))

    for fid, pair in pairs1.items():
        if pair in pairs2:
            vf.copyFeatInShp2(lyr1.GetFeature(fid), newshp)
def count(shp1, shp2):
    """Add (or refresh) an integer 'Count' field on shp1.

    For each feature of shp1, stores the number of shp2 features selected
    by a spatial filter on its geometry (OGR spatial-filter semantics, i.e.
    bounding-box based candidates).

    Parameters
    ----------
    shp1 : str
        shapefile updated in place (opened for writing)
    shp2 : str
        shapefile whose features are counted
    """
    ds = vf.openToWrite(shp1)
    lyr = ds.GetLayer()
    ds2 = vf.openToRead(shp2)
    lyr2 = ds2.GetLayer()
    lyr_defn = lyr.GetLayerDefn()
    field_names = [lyr_defn.GetFieldDefn(i).GetName()
                   for i in range(lyr_defn.GetFieldCount())]
    field = 'Count'
    # Recreate the field from scratch so no stale value survives.
    if field in field_names:
        lyr.DeleteField(field_names.index(field))
    field_c = ogr.FieldDefn(field, ogr.OFTInteger)
    field_c.SetWidth(8)
    lyr.CreateField(field_c)
    for feat in lyr:
        lyr2.SetSpatialFilter(feat.GetGeometryRef())
        # Fixes: dropped the redundant lyr.SetFeature(feat) issued before
        # SetField (it wrote the unmodified feature back for nothing), and
        # renamed the counter that shadowed this function's own name.
        nb_matches = lyr2.GetFeatureCount()
        feat.SetField(field, nb_matches)
        lyr.SetFeature(feat)
def AreaPoly(shp1, shp2):
    """Add (or refresh) a real 'AreaP' field on shp1.

    For each feature of shp1, stores the cumulated area of the shp2
    features selected by a spatial filter on its geometry, expressed as a
    percentage of the feature's own area.

    Parameters
    ----------
    shp1 : str
        shapefile updated in place (opened for writing)
    shp2 : str
        shapefile whose overlapping areas are cumulated
    """
    ds = vf.openToWrite(shp1)
    lyr = ds.GetLayer()
    ds2 = vf.openToRead(shp2)
    lyr2 = ds2.GetLayer()
    lyr_defn = lyr.GetLayerDefn()
    field_names = [lyr_defn.GetFieldDefn(i).GetName()
                   for i in range(lyr_defn.GetFieldCount())]
    field = 'AreaP'
    # Recreate the field from scratch so no stale value survives.
    if field in field_names:
        lyr.DeleteField(field_names.index(field))
    field_c = ogr.FieldDefn(field, ogr.OFTReal)
    field_c.SetWidth(8)
    lyr.CreateField(field_c)
    for feat in lyr:
        geom = feat.GetGeometryRef()
        area1 = geom.GetArea()
        lyr2.SetSpatialFilter(geom)
        area2 = 0
        for feat2 in lyr2:
            area2 += feat2.GetGeometryRef().GetArea()
        # Fix: guard degenerate zero-area polygons — the former division
        # raised ZeroDivisionError. A leftover debug print of the FID and
        # a redundant first SetFeature call were also removed.
        areaP = (area2 / area1) * 100 if area1 else 0
        feat.SetField(field, areaP)
        lyr.SetFeature(feat)
def getFidList(vect):
    """Return the distinct FIDs of the features of *vect* (order arbitrary,
    as with the original set() round-trip)."""
    # Keep the datasource referenced while iterating: OGR layers become
    # invalid once their parent datasource is released.
    datasource = vf.openToRead(vect)
    layer = datasource.GetLayer()
    return list({feature.GetFID() for feature in layer})
def totalArea(shapefile, sizepix):
    """Return the summed area of all geometries of *shapefile*, expressed
    in pixel units (each area divided by the integer pixel size)."""
    datasource = vf.openToRead(shapefile)
    layer = datasource.GetLayer()
    pixel_size = int(sizepix)
    total = 0
    for feature in layer:
        geometry = feature.GetGeometryRef()
        # Features without a geometry are silently skipped, as before.
        if geometry:
            total += geometry.GetArea() / pixel_size
    return total
def getFieldValues(shpfile, field):
    """Return the distinct values of *field* in *shpfile*, in first-seen
    order."""
    datasource = vf.openToRead(shpfile)
    layer = datasource.GetLayer()
    # dict.fromkeys deduplicates while preserving insertion order — same
    # result as the original membership-tested append loop.
    return list(dict.fromkeys(feature.GetField(field) for feature in layer))
def listClasses(shpfile, field):
    """Return the sorted distinct values of *field* in *shpfile*."""
    datasource = vf.openToRead(shpfile)
    layer = datasource.GetLayer()
    distinct = dict.fromkeys(feature.GetField(field) for feature in layer)
    # sorted() on the deduplicated values matches the original
    # append-then-sort behavior.
    return sorted(distinct)
def ListValues(shp):
    """Interactively list the distinct values of one field of *shp*.

    Prints the available field names, reads a field name on stdin, and
    returns the distinct values of that field in first-seen order.
    Exits the program with status 1 when the typed name is not a field
    of the shapefile.
    """
    fields = vf.getFields(shp)
    print("The name of the fields are: " + ' - '.join(fields))
    field = input("Field to list values: ")
    # Fix: idiomatic `not in` instead of `not field in fields`.
    if field not in fields:
        print('This field does not exist. Verify!')
        sys.exit(1)
    ds = vf.openToRead(shp)
    layer = ds.GetLayer()
    values = []
    for feat in layer:
        # Fix: fetch the field once per feature instead of twice.
        value = feat.GetField(field)
        if value not in values:
            values.append(value)
    return values
def countByAtt(params):
    """Count features and cumulate their area for one attribute class.

    Parameters
    ----------
    params : tuple
        (vector_path, classe, field) — the vector file, the class value to
        select and the attribute field to filter on.

    Returns
    -------
    list
        [[classe, featureCount, area, rate_percent]] — one entry, matching
        the printed summary line.

    NOTE(review): a later countByAtt(shpfile, field, ...) definition exists
    in this module and shadows this one when both are loaded.
    """
    vector, classe, field = params
    ds = vf.openToRead(vector)
    layer = ds.GetLayer()
    layerDfn = layer.GetLayerDefn()
    fields = vf.getFields(vector)
    fieldTypeCode = layerDfn.GetFieldDefn(fields.index(field)).GetType()
    layer.ResetReading()
    totalarea = 0
    for feat in layer:
        totalarea += feat.GetGeometryRef().GetArea()
    stats = []
    # OGR field type code 4 is OFTString: string values must be quoted in
    # the attribute filter. The two former branches were otherwise
    # line-for-line identical and are merged here.
    if fieldTypeCode == 4:
        layer.SetAttributeFilter(field + " = '" + str(classe) + "'")
    else:
        layer.SetAttributeFilter(field + " = " + str(classe))
    featureCount = layer.GetFeatureCount()
    area = 0
    for feat in layer:
        area += feat.GetGeometryRef().GetArea()
    # NOTE(review): raises ZeroDivisionError when the layer's total area
    # is 0 — behavior kept from the original.
    partcl = area / totalarea * 100
    print("Class # %s: %s features and a total area of %s (rate : %s)"
          % (str(classe), str(featureCount), str(area), str(round(partcl, 4))))
    stats.append([classe, featureCount, area, partcl])
    layer.ResetReading()
    return stats
def gestionTraitementsClasse(cfg, layer, outfile, classe, ss_classe, source, res, area_thresh, pix_thresh, \
                             chp=None, value=None, buff=None):
    """Run the per-class sample-preparation chain on one vector layer.

    Two paths exist depending on whether *ss_classe* belongs to the
    configured linear-mask classes (cfg.parameters.maskLineaire):

    * regular classes: optional class extraction, field management,
      geometry validation, duplicate-geometry removal, buffering/erosion
      according to the geometry type, multipolygon explosion, area
      recomputation and size-based selection;
    * linear-mask classes: optional class extraction then a line buffer
      only.

    Parameters
    ----------
    cfg : configuration object (provides parameters.maskLineaire and
        Nomenclature[classe].Code)
    layer : input vector file path
    outfile : output vector file path (intermediate products derive
        their names from it)
    classe, ss_classe : class / sub-class identifiers
    source : name of the source database (used in messages only)
    res : resolution used for the inward (erosion) buffer
    area_thresh, pix_thresh : area recomputation factor and minimum size
        (in pixels) for the final selection
    chp, value : optional attribute field and value used to extract the
        class subset
    buff : optional buffer distance; the string "None" is normalized to
        None

    Returns
    -------
    str or None
        path of the produced shapefile, or None when no sample exists /
        the data is not linear.
    """
    if ss_classe not in cfg.parameters.maskLineaire:
        if chp is not None:
            FileByClass.FileByClass(layer, chp, value, outfile)
        if os.path.exists(outfile):
            manageFieldShapefile(outfile, cfg.Nomenclature[classe].Code, area_thresh)
            # Geometry validation
            vf.checkValidGeom(outfile)
            # Remove duplicated geometries
            outfile_ssdb = DeleteDuplicateGeometries.DeleteDupGeom(outfile)
            # Linear / surface buffering, or polygon erosion, depending on
            # the geometry type of the deduplicated file
            ds = vf.openToRead(outfile_ssdb)
            lyr = ds.GetLayer()
            typeGeom = lyr.GetGeomType()
            lyr = None
            # Normalize the string "None" (e.g. coming from a config file)
            if buff == "None":
                buff = None
            # wkbLineString/wkbMultiLineString/wkbMultiLineString25D/wkbLineString25D
            if typeGeom in [2, 5, -2147483643, -2147483646] and buff is not None:
                outfile_buffline = outfile_ssdb[:-4] + 'buffline' + str(buff) + '.shp'
                BufferOgr.bufferPoly(outfile_ssdb, outfile_buffline, int(buff))
                outfile_buff = outfile_buffline
            # wkbPolygon/wkbMultiPolygon/wkbPolygon25D/wkbMultiPolygon25D
            elif typeGeom in [3, 6, -2147483645, -2147483642] and buff is not None:
                # Outward then inward buffer of the same distance: a
                # morphological closing that smooths the polygons
                outfile_buffsurf_tmp = outfile_ssdb[:-4] + 'buffsurf' + str(buff) + '_tmp.shp'
                outfile_buffsurf = outfile_ssdb[:-4] + 'buffsurf' + str(buff) + '.shp'
                BufferOgr.bufferPoly(outfile_ssdb, outfile_buffsurf_tmp, int(buff))
                BufferOgr.bufferPoly(outfile_buffsurf_tmp, outfile_buffsurf, -int(buff))
                outfile_buff = outfile_buffsurf
            else:
                # No buffer requested: erode by one resolution unit
                outfile_buff = outfile_ssdb[:-4] + 'buffinv' + str(res) + '.shp'
                BufferOgr.bufferPoly(outfile_ssdb, outfile_buff, -int(res))
            # Explode multipolygons into single polygons
            outfile_spoly = outfile_buff[:-4] + 'spoly' + '.shp'
            MultiPolyToPoly.multipoly2poly(outfile_buff, outfile_spoly)
            # Recompute the areas
            AddFieldArea.addFieldArea(outfile_spoly, area_thresh)
            # Select polygons on their area
            outfile_area = outfile_spoly[:-4] + 'sup' + str(pix_thresh) + 'pix.shp'
            SelectBySize.selectBySize(outfile_spoly, 'Area', pix_thresh, outfile_area)
            # Geometry validation
            vf.checkValidGeom(outfile_area)
            return outfile_area
        else:
            print('Aucun échantillon pour la classe {} dans la base de données {}'.format(classe, source))
            return None
    else:
        if chp is not None:
            FileByClass.FileByClass(layer, chp, value, outfile)
        # Linear buffer handling for mask classes
        ds = vf.openToRead(layer)
        lyr = ds.GetLayer()
        typeGeom = lyr.GetGeomType()
        lyr = None
        if buff == "None":
            buff = None
        if typeGeom in [2, 5, -2147483643, -2147483646] and buff is not None:
            outfile_buff_mask = outfile[:-4] + 'buffline' + str(buff) + '_mask.shp'
            BufferOgr.bufferPoly(outfile, outfile_buff_mask, int(buff))
            os.system("ls {}".format(outfile_buff_mask))
            return outfile_buff_mask
        else:
            print("la donnée n'est pas de type linéaire")
            return None
def intersection(file1, file2, outfile):
    """Write into *outfile* the polygon intersections of file1 and file2.

    Every intersecting pair of features produces one output feature whose
    geometry is the intersection and whose attributes are the concatenated
    fields of both parents (fields of file1 first, then file2).

    Exits the program when the two layers do not share the same EPSG
    authority code, or when the intersection geometry type is not
    POLYGON/MULTIPOLYGON.
    """
    ds1 = vf.openToRead(file1)
    ds2 = vf.openToRead(file2)
    layer1 = ds1.GetLayer()
    layer2 = ds2.GetLayer()
    if layer1.GetSpatialRef().GetAttrValue(
            "AUTHORITY", 1) == layer2.GetSpatialRef().GetAttrValue("AUTHORITY", 1):
        srsObj = layer1.GetSpatialRef()
    else:
        print("second shapefile must have the same projection than the first shapefile (EPSG:{} vs. EPSG:{})"\
              .format(layer1.GetSpatialRef().GetAttrValue("AUTHORITY", 1),
                      layer2.GetSpatialRef().GetAttrValue("AUTHORITY", 1)))
        sys.exit(-1)
    outDriver = ogr.GetDriverByName("ESRI Shapefile")
    # Find geometry of the intersection
    if defineIntersectGeometry(layer1, layer2) in ['POLYGON', 'MULTIPOLYGON']:
        # if it exists, delete it
        if os.path.exists(outfile):
            outDriver.DeleteDataSource(outfile)
        outDataSource = outDriver.CreateDataSource(outfile)
        # Creates the spatial reference of the output layer
        outLayer = outDataSource.CreateLayer("intersect", srsObj,
                                             geom_type=ogr.wkbPolygon)
    else:
        print("This program only produces POLYGONS intersection")
        # BUG FIX: the original fell through here and crashed later with a
        # NameError on 'outLayer'; exit explicitly instead, consistently
        # with the projection-mismatch branch above.
        sys.exit(-1)
    # Fields of the first layer
    inLayerDefn = layer1.GetLayerDefn()
    for i in range(0, inLayerDefn.GetFieldCount()):
        outLayer.CreateField(inLayerDefn.GetFieldDefn(i))
    # Fields of the second layer
    inLayerDefn = layer2.GetLayerDefn()
    for i in range(0, inLayerDefn.GetFieldCount()):
        outLayer.CreateField(inLayerDefn.GetFieldDefn(i))
    # Field name lists of the three layers
    listfieldin1 = vf.getFields(layer1)
    listfieldin2 = vf.getFields(layer2)
    listfieldout = vf.getFields(outLayer)
    layer1.ResetReading()
    layer2.ResetReading()
    for feature1 in layer1:
        geom1 = feature1.GetGeometryRef()
        for feature2 in layer2:
            geom2 = feature2.GetGeometryRef()
            # keep only the intersecting pairs
            if geom2.Intersects(geom1):
                # renamed from 'intersection': it shadowed this function
                inter_geom = geom2.Intersection(geom1)
                dstfeature = ogr.Feature(outLayer.GetLayerDefn())
                dstfeature.SetGeometry(inter_geom)
                # copy the attributes of both parents into the new feature:
                # file1's fields first, then file2's
                i = 0
                j = 0
                k = 0
                while i < len(listfieldout):
                    while j < len(listfieldin1):
                        dstfeature.SetField(listfieldout[i],
                                            feature1.GetField(listfieldin1[j]))
                        i += 1
                        j += 1
                    while k < len(listfieldin2):
                        dstfeature.SetField(listfieldout[i],
                                            feature2.GetField(listfieldin2[k]))
                        i += 1
                        k += 1
                outLayer.CreateFeature(dstfeature)
                dstfeature.Destroy()
        layer2.ResetReading()
    outLayer = None
    outDataSource = None
def DifferenceFiles(shp1, shp2, distance=1000):
    """Build a 'difference' shapefile: parts of shp1 not covered by shp2.

    First pass: for every shp1 feature, copy it (or its geometric
    difference with intersecting shp2 features) into a copy of shp1 named
    'difference', skipping geometries already present there. Second pass:
    delete from the result every feature still intersecting shp2, then
    REPACK the layer.

    Parameters
    ----------
    shp1, shp2 : str
        input shapefile paths
    distance : float, optional
        half-size (in layer units) of the bounding box used to spatially
        pre-filter shp2 around each centroid. BUG FIX: the original code
        referenced an undefined global 'distance'; it is now an explicit
        parameter (default 1000 — TODO confirm a suitable value for the
        data's CRS units).

    Returns
    -------
    str
        path of the produced 'difference' shapefile.
    """
    outShp = vf.copyShp(shp1, 'difference')
    fields = vf.getFields(shp1)
    ds1 = vf.openToRead(shp1)
    ds2 = vf.openToRead(shp2)
    lyr1 = ds1.GetLayer()
    lyr2 = ds2.GetLayer()
    layerDef = lyr1.GetLayerDefn()
    print(lyr2.GetFeatureCount())
    for f1 in lyr1:
        lyr2.SetAttributeFilter(None)
        geom1 = f1.GetGeometryRef()
        centroid = geom1.Centroid()
        x = centroid.GetX()
        y = centroid.GetY()
        minX = x - float(distance)
        minY = y - float(distance)
        maxX = x + float(distance)
        maxY = y + float(distance)
        lyr2.SetSpatialFilterRect(float(minX), float(minY),
                                  float(maxX), float(maxY))
        nbfeat2 = lyr2.GetFeatureCount()
        for i in range(0, nbfeat2):
            # Re-open the output on each candidate: it grows while features
            # are copied and the duplicate check must see the fresh content.
            ds3 = vf.openToRead(outShp)
            lyr3 = ds3.GetLayer()
            lyr3.SetSpatialFilterRect(float(minX), float(minY),
                                      float(maxX), float(maxY))
            f2 = lyr2.GetFeature(i)
            print(str(f1.GetFID()) + " - " + str(i))
            geom2 = f2.GetGeometryRef()
            if geom1.Intersect(geom2) == True:
                print("True")
                if geom1.Equal(geom2) == True:
                    # BUG FIX: the original tested the undefined name
                    # 'geom' here instead of geom1.
                    if vf.VerifyGeom(geom1, lyr3) == False:
                        vf.copyFeatInShp(f1, outShp)
                elif geom1.Equal(geom2) == False:
                    # Dead vf.Difference / CreateGeometryFromWkb round-trip
                    # removed: its result was immediately overwritten by
                    # geom1.Difference(geom2).
                    newgeom2 = geom1.Difference(geom2)
                    newFeature = ogr.Feature(layerDef)
                    newFeature.SetGeometry(newgeom2)
                    for field in fields:
                        newFeature.SetField(field, f1.GetField(field))
                    if vf.VerifyGeom(newgeom2, lyr3) == False:
                        vf.copyFeatInShp(newFeature, outShp)
                    newFeature.Destroy()
            elif geom1.Intersect(geom2) == False:
                print("False")
                if not vf.VerifyGeom(geom1, lyr3):
                    vf.copyFeatInShp(f1, outShp)
            f2.Destroy()
        f1.Destroy()
    # Second pass: remove from the output every feature that still
    # intersects shp2.
    ds2 = vf.openToRead(shp2)
    lyr2 = ds2.GetLayer()
    ds3 = vf.openToWrite(outShp)
    lyr3 = ds3.GetLayer()
    for feat in lyr3:
        geom1 = feat.GetGeometryRef()
        centroid = geom1.Centroid()
        x = centroid.GetX()
        y = centroid.GetY()
        minX = x - float(distance)
        minY = y - float(distance)
        maxX = x + float(distance)
        maxY = y + float(distance)
        lyr2.SetSpatialFilterRect(float(minX), float(minY),
                                  float(maxX), float(maxY))
        nbfeat2 = lyr2.GetFeatureCount()
        for i in range(0, nbfeat2):
            f2 = lyr2.GetFeature(i)
            geom2 = f2.GetGeometryRef()
            if geom1.Intersect(geom2) == True:
                lyr3.DeleteFeature(feat.GetFID())
    # Physically purge the deleted features
    ds3.ExecuteSQL('REPACK ' + lyr3.GetName())
    return outShp
def countByAtt(shpfile, field, storecsv="", val=None):
    """Count features (and cumulate polygon area) per value of *field*.

    Parameters
    ----------
    shpfile : str
        input vector file
    field : str
        attribute field whose values define the classes
    storecsv : str, optional
        when non-empty, the stats are written to this CSV file instead of
        printing the area summary lines
    val : optional
        when given, restrict the computation to this single class value

    Returns
    -------
    list
        per class: [cl, featureCount, area, rate] for polygon layers,
        [cl, featureCount] otherwise.

    NOTE(review): this definition shadows the earlier countByAtt(params)
    when both live in the same module.
    """
    ds = vf.openToRead(shpfile)
    fields = vf.getFields(shpfile)
    layer = ds.GetLayer()
    # BUG FIX: 'classes' was used without ever being initialized, raising
    # NameError on every call.
    classes = []
    if val is None:
        for feature in layer:
            cl = feature.GetField(field)
            if cl not in classes:
                classes.append(cl)
    else:
        classes.append(val)
    layerDfn = layer.GetLayerDefn()
    fieldTypeCode = layerDfn.GetFieldDefn(fields.index(field)).GetType()
    classes.sort()
    layer.ResetReading()
    totalarea = 0
    if "POLYGON" in vf.getGeomTypeFromFeat(shpfile):
        for feat in layer:
            geom = feat.GetGeometryRef()
            if geom:
                if not math.isnan(geom.GetArea()):
                    totalarea += geom.GetArea()
    stats = []
    for cl in classes:
        # OGR field type code 4 is OFTString: string values must be quoted
        # in the attribute filter. The two former branches were otherwise
        # identical and are merged below.
        if fieldTypeCode == 4:
            layer.SetAttributeFilter(field + " = \"" + str(cl) + "\"")
        else:
            layer.SetAttributeFilter(field + " = " + str(cl))
        featureCount = layer.GetFeatureCount()
        if "POLYGON" in vf.getGeomTypeFromFeat(shpfile):
            area = 0
            for feat in layer:
                geom = feat.GetGeometryRef()
                if geom:
                    area += geom.GetArea()
            partcl = area / totalarea * 100
            if storecsv == "" or storecsv is None:
                print(
                    "Class # %s: %s features and a total area of %s (rate : %s)"
                    % (str(cl), str(featureCount), str(area),
                       str(round(partcl, 2))))
            stats.append([cl, featureCount, area, partcl])
        else:
            print("Class # %s: %s features" % (str(cl), str(featureCount)))
            stats.append([cl, featureCount])
        layer.ResetReading()
    if storecsv != "" and storecsv is not None:
        with open(storecsv, "w") as f:
            writer = csv.writer(f)
            writer.writerows(stats)
    return stats
def zonalstats(path, rasters, params, output, paramstats, classes="",
               bufferDist=None, nodata=0, gdalpath="", systemcall=False,
               gdalcachemax="9000", logger=LOGGER):
    """Compute zonal statistics (descriptive and categorical) on a
    multi-band raster or several rasters, based on a Point (buffered or
    not) or Polygon zonal vector.

    Parameters
    ----------
    path : string
        working directory
    rasters : list
        list of rasters to analyse
    params : list
        list of fid list and vector file
    output : string
        vector file (sqlite, shapefile or geojson) to store statistics
    paramstats : list
        list of statistics to compute (e.g. {1:'stats', 2:'rate'})

        - paramstats = {1:"rate", 2:"statsmaj", 3:"statsmaj", 4:"stats", 2:stats_cl}
        - stats : mean_b, std_b, max_b, min_b
        - statsmaj : meanmaj, stdmaj, maxmaj, minmaj of majority class
        - rate : rate of each pixel value (class names)
        - stats_cl : mean_cl, std_cl, max_cl, min_cl of one class
        - val : value of corresponding pixel (only for Point geometry and
          without other stats)
    classes : nomenclature file
        nomenclature
    bufferDist : int
        in case of point zonal vector: buffer size
    nodata : int
        raster nodata value passed to the stats computation
    gdalpath : string
        path of gdal binaries (for system execution)
    systemcall : boolean
        if True, warped rasters are stored in working dir
    gdalcachemax : string
        gdal cache for warping operation (in Mb)
    logger : logging.Logger
        NOTE(review): this parameter is currently ignored — the body uses
        the module-level LOGGER everywhere; confirm whether callers expect
        their logger to be honored.
    """
    LOGGER.info("Begin to compute zonal statistics for vector file %s" %
                (output))
    if systemcall and not gdalpath:
        LOGGER.info(
            "Please provide gdal binaries path when systemcall is set to true")
        sys.exit()
    # Idempotence: do nothing when the output already exists
    if os.path.exists(output):
        return
    # Get bands or raster number
    if len(rasters) != 1:
        nbbands = len(rasters)
    else:
        nbbands = fut.getRasterNbands(rasters[0])
    # Prepare and check validity of statistics methods and input raster
    paramstats = checkmethodstats(rasters, paramstats, nbbands)
    # Get vector file and FID list
    if len(params) > 1:
        vector, idvals = params
    else:
        vector = params[0][0]
        idvals = params[0][1]
    # if no vector subsetting (all features)
    fullfid = getFidList(vector)
    if not idvals:
        idvals = fullfid
        novals = []
    else:
        # FIDs excluded from the subset (needed later for formatting)
        novals = [x for x in fullfid if x not in idvals]
    # vector open and iterate features and/or buffer geom
    vectorname = os.path.splitext(os.path.basename(vector))[0]
    vectorgeomtype = vf.getGeomType(vector)
    vectorbuff = None
    # Prepare schema of output geopandas dataframe (geometry type and
    # columns formatting)
    schema = setPandasSchema(paramstats, vectorgeomtype, bufferDist)
    # Buffer Point vector file (geom type codes 1/4/1001/1004 are the
    # OGR Point / MultiPoint variants)
    if bufferDist and vectorgeomtype in (1, 4, 1001, 1004):
        vectorbuff = os.path.join(path, vectorname + "buff.shp")
        _ = bfo.bufferPoly(vector, vectorbuff, bufferDist=bufferDist)
        vectorgeomtype = vf.getGeomType(vectorbuff)
    # Store input vector in output geopandas dataframe
    vectgpad = gpad.read_file(vector)
    # Prepare statistics columns of output geopandas dataframe
    stats = definePandasDf(vectgpad, idvals, paramstats, classes)
    # Iterate FID list
    dataset = vf.openToRead(vector)
    lyr = dataset.GetLayer()
    if "fid" in [x.lower() for x in vf.getFields(vector)]:
        raise ValueError(
            "FID field not allowed. This field name is reserved by gdal binary."
        )
    for idval in idvals:
        if vectorgeomtype in (1, 4, 1001, 1004):
            if 'val' in list(paramstats.values()):
                # Locate the point coordinates of this feature for the
                # 'val' (pixel value) statistic
                lyr.SetAttributeFilter("FID=" + str(idval))
                for feat in lyr:
                    geom = feat.GetGeometryRef()
                    if geom:
                        if vectorgeomtype == 4:
                            point = geom.GetGeometryRef(0)
                            xpt = point.GetX()
                            ypt = point.GetY()
                        else:
                            xpt, ypt, _ = geom.GetPoint()
                # NOTE(review): if the filter matches no feature with a
                # geometry, xpt/ypt stay unbound and the extractPixelValue
                # call below would raise NameError — confirm inputs.
            # Switch to buffered vector (Point and bufferDist)
            if bufferDist:
                if vectorbuff:
                    vector = vectorbuff
        # creation of warped rasters
        success, bands, err = extractRasterArray(rasters, paramstats,
                                                 vector, vectorgeomtype,
                                                 idval, gdalpath,
                                                 gdalcachemax, systemcall,
                                                 path)
        if success:
            if 'val' in list(paramstats.values()):
                stats = extractPixelValue(rasters, bands, paramstats, xpt,
                                          ypt, stats, idval)
            else:
                stats = computeStats(bands, paramstats, stats, idval,
                                     nodata)
        else:
            print(
                "gdalwarp problem for feature %s (%s) : statistic computed with rasterio"
                % (idval, err))
    # Prepare columns name and format of output dataframe
    if "rate" in list(paramstats.values()) and classes != "":
        stats, schema = formatDataFrame(stats, schema, novals, True,
                                        classes)
    else:
        stats, schema = formatDataFrame(stats, schema, novals)
    # exportation
    dataframeExport(stats, output, schema)
    LOGGER.info("End to compute zonal statistics for vector file %s" %
                (output))
def foret_non_foret(chemin, FileInit, FileOut, convex=0.7, compa=0.4,
                    elong=2.5):
    """Split an initial vegetation layer into 'forest' / 'non-forest'.

    Runs a morphological opening (erosion then dilation) on *FileInit* to
    extract smoothed forest outlines, derives the non-forest residue by
    geometric difference, filters small forests (< 5000 m2) back into
    non-forest, classifies the non-forest polygons with shape descriptors
    (convexity, compactness, elongation thresholds given as parameters),
    merges forest-classified polygons touching a larger forest, and writes
    the merged forest + non-forest result to *FileOut*.

    Side effects: changes the current directory to *chemin*, creates and
    then removes many 'tmp_*' intermediate shapefiles there.

    Parameters
    ----------
    chemin : str
        working directory
    FileInit : str
        initial vector file (assumed to lie inside *chemin*)
    FileOut : str
        merged output vector file
    convex, compa, elong : float
        thresholds passed to addClassAHF for the shape-based
        classification of non-forest polygons
    """
    os.chdir(chemin)
    print(os.path.join(chemin, FileInit))
    vf.checkValidGeom(FileInit)
    # Erosion then dilation of the initial file to keep only the
    # (smoothed) forest outlines
    bo.bufferPoly(FileInit, 'tmp_Erosion20.shp', -20)
    bo.bufferPoly('tmp_Erosion20.shp', 'tmp_Dilatation20_poly.shp', 20)
    # Extra dilation to recover the objects that disappear after the
    # opening, via spatial selection
    bo.bufferPoly('tmp_Dilatation20_poly.shp', 'tmp_Extra_Dila_poly.shp', 20)
    mpp.multipoly2poly('tmp_Dilatation20_poly.shp', 'tmp_Dilatation20.shp')
    mpp.multipoly2poly('tmp_Extra_Dila_poly.shp', 'tmp_Extra_Dila.shp')
    # Forest / non-forest differentiation: subtract the opening from the
    # undifferentiated forest file
    #os.system('python DifferenceQGIS.py ' + FileInit + ' tmp_Dilatation20.shp True tmp_Non_foret_temp_poly.shp')
    sd.shapeDifference(FileInit, 'tmp_Dilatation20.shp',
                       'tmp_Non_foret_temp_poly.shp', False, None)
    # Explode multipolygons into single polygons
    mpp.multipoly2poly('tmp_Non_foret_temp_poly.shp', 'tmp_Non_Foret_temp.shp')
    # Remove the residues of the difference to obtain the true forest
    # outlines
    NF = vf.openToWrite('tmp_Non_Foret_temp.shp')
    ED = vf.openToRead('tmp_Extra_Dila.shp')
    NFLayer = NF.GetLayer()
    EDLayer = ED.GetLayer()
    for fil in EDLayer:
        gfil = fil.GetGeometryRef()
        # NOTE(review): features are deleted while iterating the same
        # layer — appears intentional (ResetReading follows), but confirm
        # the OGR driver tolerates it.
        for f in NFLayer:
            g = f.GetGeometryRef()
            fID = f.GetFID()
            if gfil.Contains(g):
                NFLayer.DeleteFeature(fID)
        NFLayer.ResetReading()
    NF.Destroy()
    ED.Destroy()
    NFLayer = None
    EDLayer = None
    # Subtract the non-forest from the initial file to obtain the true
    # forest outlines
    sd.shapeDifference(FileInit, 'tmp_Non_Foret_temp.shp',
                       'tmp_Foret_temp_poly.shp', False, None)
    vf.checkValidGeom('tmp_Foret_temp_poly.shp')
    # Explode multipolygons into single polygons
    mpp.multipoly2poly('tmp_Foret_temp_poly.shp', 'tmp_Foret_temp.shp')
    # Sort the forests: below 5000 m2 they are re-classified as non-forest
    F = vf.openToWrite('tmp_Foret_temp.shp')
    FLayer = F.GetLayer()
    for f in FLayer:
        g = f.GetGeometryRef()
        fID = f.GetFID()
        if g.GetArea() < 5000:
            FLayer.DeleteFeature(fID)
    F.Destroy()
    FLayer = None
    vf.checkValidGeom('tmp_Foret_temp.shp')
    # Subtract the forests from the original file to recover groves and
    # thickets in the non-forest layer
    sd.shapeDifference(FileInit, 'tmp_Foret_temp.shp',
                       'tmp_Non_foret_full_poly.shp', False, None)
    vf.checkValidGeom('tmp_Non_foret_full_poly.shp')
    # Explode multipolygons into single polygons
    mpp.multipoly2poly('tmp_Non_foret_full_poly.shp', 'tmp_Non_Foret_full.shp')
    vf.checkValidGeom('tmp_Non_Foret_full.shp')
    # Non-forest refinement: compute the discrimination fields
    # (elongation, convexity, compactness)
    addDiscriminationFields('tmp_Non_Foret_full.shp')
    # Add a class field derived from the discrimination fields
    addClassAHF('tmp_Non_Foret_full.shp', convex, compa, elong)
    # Post-processing: if a polygon classified as forest touches a larger
    # forest, merge both into a single forest
    F = vf.openToRead('tmp_Foret_temp.shp')
    FL = F.GetLayer()
    NF = vf.openToWrite('tmp_Non_Foret_full.shp')
    NFL = NF.GetLayer()
    for foret in FL:
        gforet = foret.GetGeometryRef()
        for nonforet in NFL:
            fID = nonforet.GetFID()
            gnonforet = nonforet.GetGeometryRef()
            # Distance == 0 means the two polygons touch or overlap
            if gnonforet.Distance(gforet) == 0 and (
                    nonforet.GetField('ClassAHF') == 'Foret'
                    or nonforet.GetField('ClassAHF') == 'AutreAHF'):
                NFL.DeleteFeature(fID)
        NFL.ResetReading()
    F.Destroy()
    NF.Destroy()
    FL = None
    NFL = None
    vf.checkValidGeom('tmp_Non_Foret_full.shp')
    # Difference between the initial file and the new forests
    sd.shapeDifference(FileInit, 'tmp_Non_Foret_full.shp',
                       'tmp_Foret_full_poly.shp', False, None)
    vf.checkValidGeom('tmp_Foret_full_poly.shp')
    # Explode multipolygons into single polygons
    mpp.multipoly2poly('tmp_Foret_full_poly.shp', 'tmp_Foret_full.shp')
    # Add the class field for the forests
    F = vf.openToWrite('tmp_Foret_full.shp')
    FL = F.GetLayer()
    new_field = ogr.FieldDefn('ClassAHF', ogr.OFTString)
    FL.CreateField(new_field)
    for f in FL:
        f.SetField('ClassAHF', 'Foret')
        FL.SetFeature(f)
    F.Destroy()
    FL = None
    # Merge the forest file with the non-forest file
    mf.mergeVectors(['tmp_Foret_full.shp', 'tmp_Non_Foret_full.shp'], FileOut)
    # Remove the intermediate files
    os.system('rm tmp_*')