def getFidArea(shapefile, classf=""):
    """Return ``[FID, class value, area]`` for every feature of a shapefile.

    Parameters
    ----------
    shapefile : str
        path to the input ESRI Shapefile
    classf : str
        optional name of the class field; when empty or None every feature
        gets the fake class ``1``

    Return
    ------
    list
        one ``[fid, class, area]`` entry per feature with a non-null geometry
    """
    driver = ogr.GetDriverByName("ESRI Shapefile")
    datasource = driver.Open(shapefile, 0)
    layer = datasource.GetLayer()
    fieldlist = vf.getFields(layer)
    # explicit membership test instead of a bare except around list.index
    use_class_field = classf not in ("", None)
    if use_class_field and classf not in fieldlist:
        print("The field {} does not exist in the input shapefile".format(
            classf))
        print("You must choose one of these existing fields : {}".format(
            ' / '.join(fieldlist)))
        sys.exit(-1)
    listid = []
    for feat in layer:
        geom = feat.GetGeometryRef()
        if geom is not None:
            if use_class_field:
                listid.append(
                    [feat.GetFID(), feat.GetField(classf), geom.GetArea()])
            else:
                # fake class to 1 if no field class provided
                listid.append([feat.GetFID(), 1, geom.GetArea()])
    # release the GDAL handles
    layer = datasource = None
    return listid
def manageFieldShapefile(shapefile, fieldout, areapix, outformat="ESRI shapefile"):
    """Keep only ``fieldout``, an 'ID' field and a fresh 'Area' field.

    'ID' is created when missing, 'Area' is recomputed from ``areapix`` and
    every other pre-existing field of the shapefile is deleted.
    """
    # Fields currently present, minus the one we must preserve
    remaining = vf.getFields(shapefile, outformat)
    remaining.remove(fieldout)

    # FID-like 'ID' field: create it only when absent
    if 'ID' in remaining:
        remaining.remove('ID')
    else:
        AddFieldID.addFieldID(shapefile)

    # 'Area' field: always recreated from scratch
    if 'Area' in remaining:
        DeleteField.deleteField(shapefile, 'Area')
        remaining.remove('Area')
    AddFieldArea.addFieldArea(shapefile, areapix)

    # Drop every remaining original field
    for name in remaining:
        DeleteField.deleteField(shapefile, name)
def manageFieldShapefile(shapefile, value, areapix):
    """Rebuild the 'ID', land-cover and 'Area' fields of a shapefile.

    Each of the three fields is recreated from scratch (deleted first when it
    already exists); the land-cover field is filled with ``value`` and 'Area'
    is computed from ``areapix``. Every other field is removed.
    """
    lc_field = cfg.parameters.landCoverField
    existing = vf.getFields(shapefile)

    # Unique FID: always recreated
    if 'ID' in existing:
        DeleteField.deleteField(shapefile, 'ID')
        existing.remove('ID')
    AddFieldID.addFieldID(shapefile)

    # Land-cover field populated with the requested class value
    if lc_field in existing:
        DeleteField.deleteField(shapefile, lc_field)
        existing.remove(lc_field)
    AddField.addField(shapefile, lc_field, value)

    # Area recomputed from the pixel area
    if 'Area' in existing:
        DeleteField.deleteField(shapefile, 'Area')
        existing.remove('Area')
    AddFieldArea.addFieldArea(shapefile, areapix)

    # Remove the remaining original fields
    for field in existing:
        DeleteField.deleteField(shapefile, field)
def ListValues(shp):
    """Interactively list the distinct values of a field of a shapefile.

    Prompts the user for a field name and returns its distinct values in
    order of first appearance.

    Parameters
    ----------
    shp : str
        path to the input shapefile

    Return
    ------
    list
        distinct values of the chosen field
    """
    fields = vf.getFields(shp)
    print("The name of the fields are: " + ' - '.join(fields))
    field = input("Field to list values: ")
    if field not in fields:
        print('This field does not exist. Verify!')
        sys.exit(1)
    ds = vf.openToRead(shp)
    layer = ds.GetLayer()
    values = []
    seen = set()  # O(1) membership instead of rescanning the list per feature
    for feat in layer:
        value = feat.GetField(field)  # read the attribute only once
        if value not in seen:
            seen.add(value)
            values.append(value)
    return values
def countByAtt(params):
    """Count the features of one class and cumulate their area.

    Parameters
    ----------
    params : tuple
        ``(vector path, class value, field name)``

    Return
    ------
    list
        ``[[classe, feature count, class area, class area rate in %]]``
    """
    vector, classe, field = params
    ds = vf.openToRead(vector)
    layer = ds.GetLayer()
    layerDfn = layer.GetLayerDefn()
    fields = vf.getFields(vector)
    fieldTypeCode = layerDfn.GetFieldDefn(fields.index(field)).GetType()
    layer.ResetReading()

    # Total layer area, denominator of the class rate
    totalarea = 0
    for feat in layer:
        geom = feat.GetGeometryRef()
        totalarea += geom.GetArea()

    stats = []
    # The two original branches only differed by the filter quoting:
    # OGR type code 4 is OFTString, whose values must be quoted
    if fieldTypeCode == 4:
        layer.SetAttributeFilter(field + " = \'" + str(classe) + "\'")
    else:
        layer.SetAttributeFilter(field + " = " + str(classe))
    featureCount = layer.GetFeatureCount()
    area = 0
    for feat in layer:
        geom = feat.GetGeometryRef()
        area += geom.GetArea()
    partcl = area / totalarea * 100
    print("Class # %s: %s features and a total area of %s (rate : %s)"%(str(classe), \
                                                                        str(featureCount),\
                                                                        str(area), \
                                                                        str(round(partcl,4))))
    stats.append([classe, featureCount, area, partcl])
    layer.ResetReading()
    return stats
def multipoly2poly(inshape, outshape, do_correction=True, outformat="ESRI shapefile"):
    """Check if a geometry is a MULTIPOLYGON, if it is it will not be split
    in POLYGON

    Parameters
    ----------
    inshape : string
        input shapeFile
    outshape : string
        output shapeFile
    do_correction : bool
        flag to remove MULTIPOLYGONs
    Return
    ------
    int
        number of MULTIPOLYGON found
    """
    # Names of the fields to carry over into the corrected layer
    field_name_list = vf.getFields(inshape)

    # Open the input datasource read-only
    driver = ogr.GetDriverByName(outformat)
    in_ds = driver.Open(inshape, 0)
    in_lyr = in_ds.GetLayer()
    in_lyr_defn = in_lyr.GetLayerDefn()
    spatial_ref = in_lyr.GetSpatialRef()

    # A stale output would prevent the creation below
    if os.path.exists(outshape):
        driver.DeleteDataSource(outshape)

    out_lyr = None
    if do_correction:
        # Polygon-only output layer carrying the same fields as the input
        out_ds = driver.CreateDataSource(outshape)
        out_lyr = out_ds.CreateLayer('poly', spatial_ref,
                                     geom_type=ogr.wkbPolygon)
        for idx in range(len(field_name_list)):
            field_defn = in_lyr_defn.GetFieldDefn(idx)
            if field_defn.GetName() in field_name_list:
                out_lyr.CreateField(field_defn)

    # Delegate the per-feature MULTIPOLYGON detection / splitting
    multipoly = manageMultiPoly2Poly(in_lyr, out_lyr, field_name_list,
                                     do_correction)
    return multipoly
def changeName(filein, fieldin, fieldout):
    """Rename a field of a shapefile by copy-then-delete.

    A new field ``fieldout`` is created with the same type, width and
    precision as ``fieldin``, values are copied feature by feature, then the
    original field is deleted.

    Parameters
    ----------
    filein : str
        path to the shapefile (modified in place)
    fieldin : str
        name of the existing field to rename
    fieldout : str
        new field name (must not already exist)
    """
    fieldList = vf.getFields(filein)
    if fieldout in fieldList:
        print("Field name {} already exists".format(fieldout))
        sys.exit(1)

    # Get input file and field characteristics
    source = ogr.Open(filein, 1)
    layer = source.GetLayer()
    layer_defn = layer.GetLayerDefn()
    i = layer_defn.GetFieldIndex(fieldin)
    # GetFieldIndex returns -1 for a missing field: fail explicitly (and with
    # a failure exit code) instead of masking it with a bare except
    if i < 0:
        print("Field {} not exists in the input shapefile".format(fieldin))
        sys.exit(1)

    # Create the out field with in field characteristics
    fieldTypeCode = layer_defn.GetFieldDefn(i).GetType()
    fieldWidth = layer_defn.GetFieldDefn(i).GetWidth()
    fieldPrecision = layer_defn.GetFieldDefn(i).GetPrecision()
    newField = ogr.FieldDefn(fieldout, fieldTypeCode)
    newField.SetWidth(fieldWidth)
    newField.SetPrecision(fieldPrecision)
    layer.CreateField(newField)

    # Copy the values into the new field (a single SetFeature per feature
    # is enough; the original wrote each feature twice)
    for feat in layer:
        feat.SetField(fieldout, feat.GetField(fieldin))
        layer.SetFeature(feat)

    # Release GDAL handles before rewriting the file
    layer = feat = newField = source = None
    DeleteField.deleteField(filein, fieldin)
def check_ground_truth(input_vector,
                       output_vector,
                       data_field,
                       epsg,
                       do_corrections,
                       display=False):
    """Check that a ground-truth vector file is usable.

    Errors are accumulated (never raised) as ServiceError objects: wrong
    projection, invalid file name, missing or badly-typed label field, empty
    geometries, duplicated features, MULTIPOLYGONs and invalid geometries.
    When ``do_corrections`` is True the geometric fixes are applied, the
    corrected data ending up in ``output_vector`` when it is provided.

    Parameters
    ----------
    input_vector : str
        path to the vector file to check
    output_vector : str
        path of the corrected vector file (may be None)
    data_field : str
        name of the label field, expected to hold integers
    epsg : int or str
        expected EPSG code of the file's projection
    do_corrections : bool
        apply the geometric corrections
    display : bool
        print the collected errors

    Return
    ------
    list
        the ServiceError objects describing every problem found
    """
    import os
    from iota2.Common.FileUtils import removeShape
    from iota2.Common.FileUtils import cpShapeFile
    from iota2.VectorTools import checkGeometryAreaThreshField
    from iota2.VectorTools.vector_functions import getFields
    from iota2.VectorTools.vector_functions import getFieldType
    from iota2.VectorTools.vector_functions import checkEmptyGeom
    from iota2.VectorTools.vector_functions import get_vector_proj
    from iota2.VectorTools.vector_functions import checkValidGeom
    from iota2.VectorTools.DeleteDuplicateGeometriesSqlite import deleteDuplicateGeometriesSqlite
    from iota2.VectorTools.MultiPolyToPoly import multipoly2poly
    from iota2.Common import ServiceError
    tmp_files = []
    input_vector_fields = getFields(input_vector)
    errors = []

    # check vector's projection
    vector_projection = get_vector_proj(input_vector)
    if not int(epsg) == int(vector_projection):
        error_msg = "{} projection ({}) incorrect".format(
            input_vector, vector_projection)
        errors.append(ServiceError.invalidProjection(error_msg))

    # check vector's name
    name_check = vector_name_check(input_vector)
    if name_check is False:
        error_msg = "file's name not correct, it must start with an ascii letter"
        errors.append(ServiceError.namingConvention(error_msg))

    # check field presence, then its type (labels must be integers)
    if not data_field in input_vector_fields:
        errors.append(ServiceError.missingField(input_vector, data_field))
    else:
        # check field's type
        label_field_type = getFieldType(input_vector, data_field)
        if not label_field_type is int:
            errors.append(ServiceError.fieldType(input_vector, data_field,
                                                 int))

    # geometries checks: each step feeds the next one
    # 1) remove empty geometries
    shape_no_empty_name = "no_empty.shp"
    shape_no_empty_dir = os.path.split(input_vector)[0]
    shape_no_empty = os.path.join(shape_no_empty_dir, shape_no_empty_name)
    shape_no_empty, empty_geom_number = checkEmptyGeom(
        input_vector,
        do_corrections=do_corrections,
        output_file=shape_no_empty)
    if empty_geom_number != 0:
        error_msg = "'{}' contains {} empty geometries".format(
            input_vector, empty_geom_number)
        if do_corrections:
            error_msg = "{} and they were removed".format(error_msg)
        errors.append(ServiceError.emptyGeometry(error_msg))
    tmp_files.append(shape_no_empty)

    # 2) remove duplicates features
    shape_no_duplicates_name = "no_duplicates.shp"
    shape_no_duplicates_dir = os.path.split(input_vector)[0]
    shape_no_duplicates = os.path.join(shape_no_duplicates_dir,
                                       shape_no_duplicates_name)
    shape_no_duplicates, duplicated_features = deleteDuplicateGeometriesSqlite(
        shape_no_empty, do_corrections, shape_no_duplicates, quiet_mode=True)
    if duplicated_features != 0:
        error_msg = "'{}' contains {} duplicated features".format(
            input_vector, duplicated_features)
        if do_corrections:
            error_msg = "{} and they were removed".format(error_msg)
        errors.append(ServiceError.duplicatedFeatures(error_msg))
    tmp_files.append(shape_no_duplicates)

    # 3) remove multipolygons
    shape_no_multi_name = "no_multi.shp"
    shape_no_multi_dir = os.path.split(input_vector)[0]
    shape_no_multi = os.path.join(shape_no_multi_dir, shape_no_multi_name)
    multipolygons_number = multipoly2poly(shape_no_duplicates, shape_no_multi,
                                          do_corrections)
    if multipolygons_number != 0:
        error_msg = "'{}' contains {} MULTIPOLYGON".format(
            input_vector, multipolygons_number)
        if do_corrections:
            error_msg = "{} and they were removed".format(error_msg)
        errors.append(ServiceError.containsMultipolygon(error_msg))
    tmp_files.append(shape_no_multi)

    # 4) Check valid geometry, working on a copy so the input stays untouched
    shape_valid_geom_name = "valid_geom.shp"
    shape_valid_geom_dir = os.path.split(input_vector)[0]
    shape_valid_geom = os.path.join(shape_valid_geom_dir,
                                    shape_valid_geom_name)
    shape_valid_geom = output_vector if output_vector else shape_valid_geom
    input_valid_geom_shape = shape_no_multi if do_corrections else shape_no_duplicates
    cpShapeFile(input_valid_geom_shape.replace(".shp", ""),
                shape_valid_geom.replace(".shp", ""),
                extensions=[".prj", ".shp", ".dbf", ".shx"])
    shape_valid_geom, invalid_geom, invalid_geom_corrected = checkValidGeom(
        shape_valid_geom, display=False)

    # remove features with None geometries
    none_geoms = remove_invalid_features(shape_valid_geom)
    invalid_geom += none_geoms
    if invalid_geom != 0:
        error_msg = "'{}' contains {} invalid geometries".format(
            input_vector, invalid_geom)
        errors.append(ServiceError.invalidGeometry(error_msg))
    # NOTE(review): when output_vector is set, shape_valid_geom IS
    # output_vector, and appending it to tmp_files makes the cleanup loop
    # below delete the corrected result — confirm this is intended
    if output_vector is not None:
        tmp_files.append(shape_valid_geom)

    # clean every intermediate shapefile (never the input itself)
    for tmp_file in tmp_files:
        if tmp_file is not input_vector and os.path.exists(tmp_file):
            removeShape(tmp_file.replace(".shp", ""),
                        [".prj", ".shp", ".dbf", ".shx"])
    if display:
        print("\n".join(errors))
    return errors
def get_randomPolyAreaThresh(wd,
                             shapefile,
                             field,
                             classe,
                             thresh,
                             outlistfid="",
                             split=1,
                             outShapefile=None,
                             nolistid=None):
    """Randomly select polygons of one class until an area threshold is met.

    Features of ``shapefile`` whose ``field`` equals ``classe`` are drawn at
    random until their cumulated area exceeds ``thresh``. The selected FIDs
    can be written to ``outlistfid`` and the matching features exported to
    ``outShapefile`` through a temporary SQLite copy created in ``wd``.

    Parameters
    ----------
    wd : str
        working directory for temporary files
    shapefile : str
        input shapefile
    field : str
        attribute field used to filter the class
    classe : str or int
        class value to select
    thresh : float
        cumulated area threshold stopping the selection
    outlistfid : str
        optional text file receiving the selected FIDs (comma-separated)
    split : int
        unused, kept for interface compatibility
    outShapefile : str
        optional output shapefile holding the selected features
    nolistid : str
        optional text file of comma-separated FIDs excluded from the draw
    """
    # Get Id and Area of all features
    driver = ogr.GetDriverByName("ESRI Shapefile")
    dataSource = driver.Open(shapefile, 0)
    layer = dataSource.GetLayer()

    # Field type (explicit membership test instead of bare except)
    fieldList = vf.getFields(layer)
    if field not in fieldList:
        print(
            "The field {} does not exist in the input shapefile".format(field))
        print("You must choose one of these existing fields : {}".format(
            ' / '.join(fieldList)))
        sys.exit(-1)
    indfield = fieldList.index(field)
    inLayerDefn = layer.GetLayerDefn()
    fieldTypeCode = inLayerDefn.GetFieldDefn(indfield).GetType()
    fieldType = inLayerDefn.GetFieldDefn(indfield).GetFieldTypeName(
        fieldTypeCode)

    # Filter on given class (string values must be quoted)
    if fieldType != "String":
        layer.SetAttributeFilter(field + "=" + str(classe))
    else:
        layer.SetAttributeFilter(field + '=\"' + classe + '\"')

    print("Get FID and Area values")
    listiddic = {}
    for feat in layer:
        geom = feat.GetGeometryRef()
        if geom:
            listiddic[feat.GetFID()] = geom.GetArea()

    if nolistid is not None:
        # Exclude the FIDs listed (comma-separated) in the nolistid file;
        # 'with' guarantees the handle is closed (the original leaked it)
        with open(nolistid, 'r') as f:
            nolistidstr = f.readline()
        nofid = set(int(y) for y in nolistidstr.split(','))
        listidtokeep = set(listiddic.keys()).difference(nofid)
        listidfinal = [(x, listiddic[x]) for x in listidtokeep]
    else:
        listidfinal = list(listiddic.items())

    print("Random selection")
    # random selection based on area sum threshold
    sumarea = 0
    listToChoice = []
    while float(sumarea) <= float(thresh) and len(listidfinal) != 0:
        elt = random.sample(listidfinal, 1)
        listToChoice.append(elt[0][0])
        listidfinal.remove(elt[0])
        sumarea += float(elt[0][1])

    strCondglob = ",".join([str(x) for x in listToChoice])
    # truthiness check: the original '!= None' let the default "" through
    # and open("", "w") raises FileNotFoundError
    if outlistfid:
        print("Listid")
        with open(outlistfid, 'w') as f:
            f.write(strCondglob)

    # SQLite limits the size of a single expression tree, so the FID list
    # is chunked below
    sqlite3_query_limit = 1000.0
    if outShapefile is not None:
        lyrtmpsqlite = os.path.splitext(os.path.basename(shapefile))[0]
        tmpsqlite = os.path.join(wd, "tmp" + lyrtmpsqlite + '.sqlite')
        os.system('ogr2ogr -preserve_fid -f "SQLite" %s %s' %
                  (tmpsqlite, shapefile))
        conn = db.connect(tmpsqlite)
        cursor = conn.cursor()
        # Delete every feature NOT selected, one NOT IN clause per chunk
        nb_sub_split_SQLITE = int(
            math.ceil(len(listToChoice) / sqlite3_query_limit))
        sub_FID_sqlite = fut.splitList(listToChoice, nb_sub_split_SQLITE)
        subFid_clause = []
        for subFID in sub_FID_sqlite:
            subFid_clause.append("(ogc_fid not in ({}))".format(", ".join(
                map(str, subFID))))
        fid_clause = " AND ".join(subFid_clause)
        sql_clause = "DELETE FROM %s WHERE %s" % (lyrtmpsqlite, fid_clause)
        cursor.execute(sql_clause)
        conn.commit()
        conn = cursor = None
        os.system('ogr2ogr -f "ESRI Shapefile" %s %s' %
                  (outShapefile, tmpsqlite))
        print(
            "Random Selection of polygons with value '{}' of field '{}' done and stored in '{}'"
            .format(classe, field, outShapefile))
def test_iota2_vectortools(self):
    """Test how many samples must be add to the sample set
    """
    # Add Field
    # Work on a copy of the reference classification shapefile
    for ext in ['.shp', '.dbf', '.shx', '.prj']:
        shutil.copyfile(
            os.path.splitext(self.classif)[0] + ext,
            os.path.splitext(self.classifwd)[0] + ext)
    afp.addFieldPerimeter(self.classifwd)
    # Negative buffer (10-unit erosion), result copied back in place
    tmpbuff = os.path.join(self.wd, "tmpbuff.shp")
    bfo.bufferPoly(self.classifwd, tmpbuff, -10)
    for ext in ['.shp', '.dbf', '.shx', '.prj']:
        shutil.copyfile(
            os.path.splitext(tmpbuff)[0] + ext,
            os.path.splitext(self.classifwd)[0] + ext)
    # Rename field "Classe" to "class", then check the resulting layer
    cnf.changeName(self.classifwd, "Classe", "class")
    self.assertEqual(vf.getNbFeat(self.classifwd), 144,
                     "Number of features does not fit")
    self.assertEqual(vf.getFields(self.classifwd), [
        'Validmean', 'Validstd', 'Confidence', 'Hiver', 'Ete', 'Feuillus',
        'Coniferes', 'Pelouse', 'Landes', 'UrbainDens', 'UrbainDiff',
        'ZoneIndCom', 'Route', 'PlageDune', 'SurfMin', 'Eau', 'GlaceNeige',
        'Prairie', 'Vergers', 'Vignes', 'Perimeter', 'class'
    ], "List of fields does not fit")
    self.assertEqual(
        vf.ListValueFields(self.classifwd, "class"),
        ['11', '12', '211', '222', '31', '32', '36', '42', '43', '51'],
        "Values of field 'class' do not fit")
    self.assertEqual(
        vf.getFieldType(self.classifwd, "class"), str,
        "Type of field 'class' (%s) do not fit, 'str' expected" %
        (vf.getFieldType(self.classifwd, "class")))
    # Recode class 11 to 0 in a new "mask" field, intersect with the
    # reference layer and filter by geometry/area
    cfr.conFieldRecode(self.classifwd, "class", "mask", 11, 0)
    so.intersectSqlites(self.classifwd, self.inter, self.wd, self.outinter,
                        2154, "intersection", [
                            'class', 'Validmean', 'Validstd', 'Confidence',
                            'ID', 'Perimeter', 'Aire', "mask"
                        ])
    check.checkGeometryAreaThreshField(self.outinter, 100, 1, self.classifwd)
    self.assertEqual(vf.getNbFeat(self.classifwd), 102,
                     "Number of features does not fit")
    # Split by the "mask" attribute then merge the pieces back together
    sba.extractFeatureFromShape(self.classifwd, 3, "mask", self.wd)
    mf.mergeVectors([
        os.path.join(self.wd, "classif0_0.shp"),
        os.path.join(self.wd, "classif0_1.shp"),
        os.path.join(self.wd, "classif0_2.shp")
    ], self.classifwd)
    self.assertEqual(vf.getFirstLayer(self.classifwd), 'classif',
                     "Layer does not exist in this shapefile")
    # Final geometric comparison against the reference output
    self.assertTrue(
        testutils.compareVectorFile(self.classifwd, self.classifout,
                                    'coordinates', 'polygon',
                                    "ESRI Shapefile"),
        "Generated shapefile vector does not fit with shapefile reference file"
    )
def gestion_echantillons(Fileconfig, ouputPath):
    """Build the final classification-sample layers from configured sources.

    Driven by the configuration file: clips the input vectors, processes
    each (source, class) pair — intersecting paired sources when the source
    name contains '_' — merges per-source samples, applies grid-based
    splitting, priority-ordered differences and mask subtraction, then
    harmonises the final layer and splits it into validation/learning sets.

    Parameters
    ----------
    Fileconfig : str
        path to the configuration file
    ouputPath : str
        output directory (a 'final' sub-directory is created inside)
    """
    cfg = read_config_file(Fileconfig)
    # Global parameters
    res = cfg.parameters.resolution
    area_thresh = int(res) * int(res)
    pix_thresh = cfg.parameters.spatialThreshold
    # Clip input vector files (single path or sequence of paths)
    if cfg.parameters.cut != '':
        if isinstance(cfg.parameters.cut, config.Sequence):
            for sourceToCut in cfg.parameters.cut:
                clipFile(cfg, ouputPath, sourceToCut)
        else:
            clipFile(cfg, ouputPath, cfg.parameters.cut)
    buff = False
    sources = get_sources(cfg)
    samples_shapefile_source = {}
    os.system("mkdir {}/{}".format(ouputPath, 'final'))
    for source in sources:
        # a composite source "A_B" requires a path for both A and B
        if source in cfg.globalPath or (source.split('_')[0] in cfg.globalPath
                                        and source.split('_')[1]
                                        in cfg.globalPath):
            for classe in sources[source]:
                # optional per-class buffer parameter
                try:
                    Buffer = cfg.Nomenclature[classe].Buffer
                    buff = True
                except:
                    pass
                if not '_' in source:
                    # simple source: process the class directly
                    print(
                        'Traitement de la base de données {} pour la classe {}'
                        .format(source, classe))
                    if buff:
                        outfile_area = gestionSamplesClasse(
                            cfg, classe, source, ouputPath, res, area_thresh,
                            pix_thresh, Buffer)
                    else:
                        outfile_area = gestionSamplesClasse(
                            cfg, classe, source, ouputPath, res, area_thresh,
                            pix_thresh)
                    # final handling of the produced file
                    gestionFichierFinal(samples_shapefile_source,
                                        outfile_area, ouputPath, source,
                                        classe)
                else:
                    # composite source: process each sub-source, then
                    # intersect the resulting datasets
                    complexDataSets = []
                    for sourceBD in source.split('_'):
                        print(
                            'Traitement de la base de données {} pour la classe {}'
                            .format(sourceBD, classe))
                        try:
                            Buffer = cfg.Nomenclature[classe].Buffer[
                                source.split('_').index(sourceBD)]
                        except:
                            Buffer = None
                        if buff and Buffer != 'None':
                            outfile_area = gestionSamplesClasse(
                                cfg, classe, sourceBD, ouputPath, res,
                                area_thresh, pix_thresh, Buffer)
                        else:
                            outfile_area = gestionSamplesClasse(
                                cfg, classe, sourceBD, ouputPath, res,
                                area_thresh, pix_thresh)
                        if outfile_area is not None:
                            complexDataSets.append([sourceBD, outfile_area])
                    # intersection of the datasets; any failure falls back
                    # to handling each dataset separately (bare except)
                    try:
                        priorSource = cfg.Nomenclature[classe].PrioTheme
                        if len([
                                x for x in complexDataSets
                                if priorSource == x[0]
                        ]) != 0:
                            priorPath = [
                                x for x in complexDataSets
                                if priorSource == x[0]
                            ][0][1]
                            secondPath = [
                                x for x in complexDataSets
                                if priorSource != x[0]
                            ][0][1]
                            secondSource = [
                                x for x in complexDataSets
                                if priorSource != x[0]
                            ][0][0]
                            if cfg.parameters.landCoverField not in vf.getFields(
                                    priorPath):
                                print('No landcover field in {} data source'.
                                      format(priorSource))
                        else:
                            # NOTE(review): .format() is called with no
                            # argument here, which raises IndexError — it is
                            # swallowed by the enclosing bare except
                            print(
                                "the priority source {} not present in sources list"
                                .format())
                        if (priorPath is not None) and (secondPath
                                                        is not None):
                            intersectFilename = ouputPath + '/inter_' + priorSource + '_' + secondSource + '_' + classe + '.shp'
                            #Intersection.intersection(priorPath, secondPath, intersectFilename)
                            command = 'python /home/thierion/Documents/OSO/Dev/vector_tools/IntersectionQGIS.py {} {} {}'.\
                                format(priorPath, secondPath, intersectFilename)
                            os.system(command)
                        else:
                            # no intersection possible
                            if secondPath is None:
                                intersectFilename = priorPath
                            else:
                                # NOTE(review): this string literal is a
                                # no-op, not a print/raise — confirm intent
                                "This case is not yet managed"
                                sys.exit(-1)
                        # field handling:
                        # drop every field except land-cover + generate Area
                        fieldList = vf.getFields(intersectFilename)
                        idxLC = fieldList.index(cfg.parameters.landCoverField)
                        for field in fieldList:
                            if fieldList.index(field) != idxLC:
                                DeleteField.deleteField(
                                    intersectFilename, field)
                        AddFieldID.addFieldID(intersectFilename)
                        AddFieldArea.addFieldArea(intersectFilename,
                                                  area_thresh)
                        samples_shapefile_source = gestionFichierFinal(samples_shapefile_source, intersectFilename, \
                                                                       ouputPath, source, classe)
                    except:
                        for dataset in complexDataSets:
                            samples_shapefile_source = gestionFichierFinal(samples_shapefile_source, dataset[1], \
                                                                           ouputPath, dataset[0], classe)
                buff = False
        else:
            print(
                "No Path for source {} provided while required for classes {}".
                format(source, sources[source]))

    # Merge the samples of the different classes for a given source
    dataSourcePriorities = {}
    listpriorities = list(cfg.parameters.priorities)
    maskToMerge = []
    outfilemergemask = ouputPath + '/final/' + cfg.parameters.samplesFileName + '_masks.shp'
    for keysource in samples_shapefile_source:
        outfilemerge = ouputPath + '/final/' + cfg.parameters.samplesFileName + '_' + keysource + '.shp'
        # separate the linear masking layers / the missing layers (no samples)
        listToMerge = []
        for src in samples_shapefile_source[keysource]:
            if len(samples_shapefile_source[keysource]) != 0:
                if src is not None:
                    if 'mask' not in src:
                        listToMerge.append(src)
                    else:
                        maskToMerge.append(src)
        # Merge the classes per source
        # NOTE(review): the 'elif len(...) == 1' branch is unreachable —
        # a one-element list already matches 'len(...) != 0'; confirm
        # whether copyShapefile was meant for the single-file case
        if len(listToMerge) != 0:
            MergeFiles.mergeVectors(listToMerge, outfilemerge)
        elif len(listToMerge) == 1:
            vf.copyShapefile(listToMerge[0], outfilemerge)
        else:
            pass
        # Grid-based splitting: areaThresh parameter case
        if cfg.parameters.areaThresh != '':
            areaT = int(sqrt(float(cfg.parameters.areaThresh))) * 100.
            # NOTE(review): when sourcesAreaThresh IS a config.Sequence the
            # local variable is never assigned before use — confirm
            if not isinstance(cfg.parameters.sourcesAreaThresh,
                              config.Sequence):
                sourcesAreaThresh = [cfg.parameters.sourcesAreaThresh]
            if keysource in sourcesAreaThresh:
                outgrid = outfilemerge[:-4] + '_grid.shp'
                outgridbuff = outgrid[:-4] + '_buff.shp'
                outfilemergeDiff = outfilemerge[:-4] + 'grid_{}ha.shp'.format(
                    cfg.parameters.areaThresh)
                CreateGrid.create_grid(outfilemerge, outgrid, areaT)
                BufferOgr.bufferPoly(outgrid, outgridbuff, 10)
                command = 'python /home/thierion/Documents/OSO/Dev/vector_tools/DifferenceQGIS.py {} {} {} {}'.\
                    format(outfilemerge, outgridbuff, True, outfilemergeDiff)
                os.system(command)
                outfilemerge = outfilemergeDiff
        # sort the paths by priority for the difference operations
        if '_' not in keysource:
            idx = listpriorities.index(keysource)
            dataSourcePriorities[idx] = outfilemerge
    # composite sources get the best (lowest) priority of their two parts,
    # shifted by 0.5 when that slot is already taken
    for keysource in samples_shapefile_source:
        if '_' in keysource:
            outfilemerge = ouputPath + '/final/' + cfg.parameters.samplesFileName + '_' + keysource + '.shp'
            idx1 = listpriorities.index(keysource.split('_')[0])
            idx2 = listpriorities.index(keysource.split('_')[1])
            if idx1 < idx2:
                if idx1 not in list(dataSourcePriorities.keys()):
                    dataSourcePriorities[idx1] = outfilemerge
                else:
                    dataSourcePriorities[idx1 + 0.5] = outfilemerge
            else:
                if idx2 not in list(dataSourcePriorities.keys()):
                    dataSourcePriorities[idx2] = outfilemerge
                else:
                    dataSourcePriorities[idx2 + 0.5] = outfilemerge
    orderedSourcesPaths = [
        value for (key, value) in sorted(dataSourcePriorities.items())
    ]
    orderedSourcesPaths.reverse()
    # Merge the linear masking layers
    if len(maskToMerge) != 0:
        MergeFiles.mergeVectors(maskToMerge, outfilemergemask)
    # Difference + final masking: chain pairwise difference/merge steps
    # from lowest to highest priority
    nbfiles = len(orderedSourcesPaths)
    indfile = 0
    outpathList = []
    if nbfiles != 1:
        while indfile < nbfiles - 1:
            if indfile == 0:
                output = orderedSourcesPaths[indfile][:-4] + '_' + \
                    os.path.basename(orderedSourcesPaths[indfile + 1]).split('_')[1][:-4] + '.shp'
                outputmerge = orderedSourcesPaths[indfile][:-4] + '_' + \
                    os.path.basename(orderedSourcesPaths[indfile + 1]).split('_')[1][:-4] + '_merge.shp'
                outpathList.append([
                    orderedSourcesPaths[indfile],
                    orderedSourcesPaths[indfile + 1], output, outputmerge
                ])
            else:
                output = outpathList[indfile - 1][2][:-4] + '_' + \
                    os.path.basename(orderedSourcesPaths[indfile + 1]).split('_')[1][:-4] + '.shp'
                outputmerge = outpathList[indfile - 1][3][:-10] + '_' + \
                    os.path.basename(orderedSourcesPaths[indfile + 1]).split('_')[1][:-4] + '_merge.shp'
                outpathList.append([
                    outpathList[indfile - 1][3],
                    orderedSourcesPaths[indfile + 1], output, outputmerge
                ])
            indfile += 1
        for listInOuput in outpathList:
            command = 'python /home/thierion/Documents/OSO/Dev/vector_tools/DifferenceQGIS.py {} {} {} {}'.\
                format(listInOuput[0], listInOuput[1], True,listInOuput[2])
            os.system(command)
            # shapeDifference.shapeDifference(listInOuput[0], listInOuput[1], listInOuput[2], False, None)
            MergeFiles.mergeVectors([listInOuput[1], listInOuput[2]],
                                    listInOuput[3])
        subfinal = outpathList[len(outpathList) - 1][3]
    else:
        subfinal = orderedSourcesPaths[0]
    # Difference with the masks (networks)
    if os.path.exists(outfilemergemask):
        subFinalSsReseaux = subfinal[:-4] + 'ssreseaux.shp'
        command = 'python /home/thierion/Documents/OSO/Dev/vector_tools/DifferenceQGIS.py {} {} {} {}'.\
            format(subfinal, outfilemergemask, True, subFinalSsReseaux)
        os.system(command)
        #shapeDifference.shapeDifference(subfinal, outfilemergemask, subFinalSsReseaux, False, None)
    else:
        subFinalSsReseaux = subfinal
    # Harmonisation of the final layer
    try:
        filefinal = ouputPath + '/final/echantillons_OSO_' + cfg.parameters.samplesFileName + '.shp'
        checkGeometryAreaThreshField.checkGeometryAreaThreshField(
            subFinalSsReseaux, area_thresh, pix_thresh, filefinal)
        print("Les échantillons de classification pour la zone {}"\
              " ont été produits dans la couche {}".format(cfg.parameters.samplesFileName, filefinal))
    except:
        print("Un problème de copie a été identifié")
    # Split samples into validation / learning groups (70/30)
    try:
        vf.RandomSelectionPolygons(filefinal, cfg.parameters.landCoverField,
                                   1, ouputPath + '/final/', 0.7)
        print("Les échantillons ont été séparés en deux groupes de validation {} et d'apprentissage {}"\
              .format(filefinal[:-4] + '_seed0_val.shp', filefinal[:-4] + '_seed0_learn.shp'))
    except:
        print("Problème de tirage aléatoire")
def intersection(file1, file2, outfile):
    """Compute the geometric intersection of two polygon shapefiles.

    Both inputs must share the same projection. The output layer carries the
    fields of both input layers, and every intersecting pair of features
    produces one output feature with both attribute sets.

    Parameters
    ----------
    file1 : str
        first input shapefile
    file2 : str
        second input shapefile (same projection as file1)
    outfile : str
        output shapefile (overwritten when it exists)
    """
    ds1 = vf.openToRead(file1)
    ds2 = vf.openToRead(file2)
    layer1 = ds1.GetLayer()
    layer2 = ds2.GetLayer()
    # Both layers must share the same EPSG authority code
    if layer1.GetSpatialRef().GetAttrValue(
            "AUTHORITY",
            1) == layer2.GetSpatialRef().GetAttrValue("AUTHORITY", 1):
        srsObj = layer1.GetSpatialRef()
    else:
        print("second shapefile must have the same projection than the first shapefile (EPSG:{} vs. EPSG:{})"\
              .format(layer1.GetSpatialRef().GetAttrValue("AUTHORITY", 1),
                      layer2.GetSpatialRef().GetAttrValue("AUTHORITY", 1)))
        sys.exit(-1)

    outDriver = ogr.GetDriverByName("ESRI Shapefile")
    # Find geometry of the intersection
    if defineIntersectGeometry(layer1, layer2) in ['POLYGON', 'MULTIPOLYGON']:
        # if exists, delete it
        if os.path.exists(outfile):
            outDriver.DeleteDataSource(outfile)
        outDataSource = outDriver.CreateDataSource(outfile)
        # Creates the spatial reference of the output layer
        outLayer = outDataSource.CreateLayer("intersect",
                                             srsObj,
                                             geom_type=ogr.wkbPolygon)
    else:
        print("This program only produces POLYGONS intersection")
        # BUGFIX: the original fell through here and then referenced the
        # unbound 'outLayer' below, raising NameError — stop explicitly
        sys.exit(-1)

    # Copy the field definitions of the first layer
    inLayerDefn = layer1.GetLayerDefn()
    for i in range(0, inLayerDefn.GetFieldCount()):
        outLayer.CreateField(inLayerDefn.GetFieldDefn(i))
    # Copy the field definitions of the second layer
    inLayerDefn = layer2.GetLayerDefn()
    for i in range(0, inLayerDefn.GetFieldCount()):
        outLayer.CreateField(inLayerDefn.GetFieldDefn(i))

    # Field name lists of the three layers
    listfieldin1 = vf.getFields(layer1)
    listfieldin2 = vf.getFields(layer2)
    listfieldout = vf.getFields(outLayer)

    layer1.ResetReading()
    layer2.ResetReading()
    for feature1 in layer1:
        geom1 = feature1.GetGeometryRef()
        for feature2 in layer2:
            geom2 = feature2.GetGeometryRef()
            # select only the intersections
            if geom2.Intersects(geom1):
                intersection = geom2.Intersection(geom1)
                dstfeature = ogr.Feature(outLayer.GetLayerDefn())
                dstfeature.SetGeometry(intersection)
                # copy the attributes of both source features: output fields
                # are ordered as layer1's fields followed by layer2's
                i = 0
                j = 0
                k = 0
                while i < len(listfieldout):
                    while j < len(listfieldin1):
                        dstfeature.SetField(
                            listfieldout[i],
                            feature1.GetField(listfieldin1[j]))
                        i += 1
                        j += 1
                    while k < len(listfieldin2):
                        dstfeature.SetField(
                            listfieldout[i],
                            feature2.GetField(listfieldin2[k]))
                        i += 1
                        k += 1
                outLayer.CreateFeature(dstfeature)
                dstfeature.Destroy()
        layer2.ResetReading()
    outLayer = None
    outDataSource = None
def generalizeVector(
        path,
        grasslib,
        vector,
        paramgene,
        method,
        mmu="",
        ncolumns="cat",
        out="",
        outformat="ESRI_Shapefile",
        debulvl="info",
        epsg="2154",
        logger=logger,
):
    """Generalize (simplify) a vector file with GRASS ``v.generalize``.

    Every field except 'cat' is dropped, the vector is imported into a
    temporary GRASS location, generalized with ``method``/``paramgene``,
    optionally cleaned of areas smaller than ``mmu``, exported and finally
    geometry-checked.

    Parameters
    ----------
    path : str
        working directory for the temporary GRASS environment
    grasslib : str
        path to the GRASS installation
    vector : str
        input vector file
    paramgene : str or float
        threshold passed to v.generalize
    method : str
        v.generalize method (e.g. 'douglas')
    mmu : str or float
        minimal mapping unit for v.clean 'rmarea' (skipped when empty)
    ncolumns : str
        column name kept at import time
    out : str
        output file (derived from the input name when empty)
    outformat : str
        OGR output format
    debulvl : str
        GRASS debug level
    epsg : str
        EPSG code of the working location
    logger : logging.Logger
        logger used for progress messages

    Return
    ------
    str
        path of the generalized vector file
    """
    timeinit = time.time()
    if out == "":
        out = os.path.splitext(vector)[0] + "_%s.shp" % (method)

    if not os.path.exists(out) and os.path.exists(vector):
        logger.info("Generalize (%s) of vector file %s" %
                    (method, os.path.basename(vector)))
        # local environnement
        layer = os.path.basename(os.path.splitext(vector)[0])
        localenv = os.path.join(path, "tmp%s" % (layer))
        if os.path.exists(localenv):
            shutil.rmtree(localenv)
        os.mkdir(localenv)
        init_grass(localenv, grasslib, debulvl, epsg)

        # remove non "cat" fields
        for field in vf.getFields(vector):
            if field != "cat":
                df.deleteField(vector, field)

        gscript.run_command(
            "v.in.ogr",
            flags="e",
            input=vector,
            output=layer,
            columns=["id", ncolumns],
            overwrite=True,
        )

        # narrowed from a bare except; the original cause is chained so it
        # is not lost for debugging
        try:
            gscript.run_command(
                "v.generalize",
                input="%s@datas" % (layer),
                method=method,
                threshold="%s" % (paramgene),
                output="generalize",
                overwrite=True,
            )
        except Exception as err:
            raise Exception(
                "Something goes wrong with generalization parameters (method '%s' or input data)"
                % (method)) from err

        if mmu != "":
            # remove areas below the minimal mapping unit before export
            gscript.run_command(
                "v.clean",
                input="generalize",
                output="cleanarea",
                tool="rmarea",
                thres=mmu,
                type="area",
            )
            gscript.run_command("v.out.ogr",
                                input="cleanarea",
                                output=out,
                                format=outformat)
        else:
            gscript.run_command("v.out.ogr",
                                input="generalize",
                                output=out,
                                format=outformat)

        timedouglas = time.time()
        logger.info(" ".join([
            " : ".join([
                "Douglas simplification and exportation",
                str(timedouglas - timeinit),
            ]),
            "seconds",
        ]))

        # clean geometries and overwrite the output with the checked result
        tmp = os.path.join(localenv, "tmp.shp")
        checkGeom.checkGeometryAreaThreshField(out, 1, 0, tmp)
        for ext in [".shp", ".dbf", ".shx", ".prj"]:
            shutil.copy(
                os.path.splitext(tmp)[0] + ext,
                os.path.splitext(out)[0] + ext)
        shutil.rmtree(localenv)
    else:
        logger.info("Output vector file already exists")

    return out
def checkGeometryAreaThreshField(shapefile,
                                 pixelArea,
                                 pix_thresh,
                                 outshape="",
                                 outformat="ESRI shapefile"):
    """Clean a shapefile and keep features above an area threshold.

    Pipeline: remove empty geometries, delete duplicated geometries, split
    multipolygons, recompute the 'Area' field from ``pixelArea``, rebuild
    the 'ID' field, select features whose 'Area' exceeds ``pix_thresh`` and
    finally validate the geometries of the result.

    Parameters
    ----------
    shapefile : str
        input shapefile
    pixelArea : float
        pixel area used to recompute the 'Area' field
    pix_thresh : int
        minimal size (in pixels) a feature must reach to be kept
    outshape : str
        output file (.shp or .sqlite); defaults to the input itself
    outformat : str
        OGR output format, overridden from the outshape extension
    """
    tmpfile = []
    if outshape == "":
        outshape = shapefile
    # Derive the output format from the extension
    if os.path.splitext(outshape)[1] == ".shp":
        outformat = "ESRI shapefile"
    elif os.path.splitext(outshape)[1] == ".sqlite":
        outformat = "SQlite"
    else:
        print("Output format not managed")
        sys.exit()

    # Empty geometry identification
    try:
        outShapefileGeom, _ = vf.checkEmptyGeom(shapefile, outformat)
        if shapefile != outshape:
            tmpfile.append(outShapefileGeom)
        print('Check empty geometries succeeded')
    except Exception as e:
        print('Check empty geometries did not work for the following error :')
        print(e)

    # delete duplicated geometries
    DeleteDuplicateGeometriesSqlite.deleteDuplicateGeometriesSqlite(
        outShapefileGeom)

    # split multipolygons into single polygons
    shapefileNoDupspoly = outShapefileGeom[:-4] + 'spoly' + '.shp'
    tmpfile.append(shapefileNoDupspoly)
    try:
        MultiPolyToPoly.multipoly2poly(outShapefileGeom, shapefileNoDupspoly)
        print(
            'Conversion of multipolygons shapefile to single polygons succeeded'
        )
    except Exception as e:
        print(
            'Conversion of multipolygons shapefile to single polygons did not work for the following error :'
        )
        print(e)

    # recompute areas
    try:
        AddFieldArea.addFieldArea(shapefileNoDupspoly, pixelArea)
    except Exception as e:
        print('Add an Area field did not work for the following error :')
        print(e)

    # rebuild the 'ID' field
    fieldList = vf.getFields(shapefileNoDupspoly)
    if 'ID' in fieldList:
        DeleteField.deleteField(shapefileNoDupspoly, 'ID')
    AddFieldID.addFieldID(shapefileNoDupspoly)

    # Filter by Area — the original ran this selection twice for
    # pix_thresh > 0 (once unconditionally, once in the branch); a single
    # call for any non-negative threshold is enough
    if pix_thresh >= 0:
        try:
            SelectBySize.selectBySize(shapefileNoDupspoly, 'Area',
                                      pix_thresh, outshape)
            print('Selection by size upper {} pixel(s) succeeded'.format(
                pix_thresh))
        except Exception as e:
            print('Selection by size did not work for the following error :')
            print(e)
    else:
        print("Area threshold has to be positive !")
        sys.exit()

    # Check geometry
    vf.checkValidGeom(outshape, outformat)

    # delete tmp file
    for fileDel in tmpfile:
        basefile = os.path.splitext(fileDel)[0]
        os.system('rm {}.*'.format(basefile))
def conFieldRecode(shapefile, fieldin, fieldout, valin, valout):
    """Recode features where ``fieldin == valin`` into ``fieldout = valout``.

    The integer field ``fieldout`` is created when missing, then every
    feature matching the filter gets ``valout`` in that field.

    Parameters
    ----------
    shapefile : str
        shapefile updated in place
    fieldin : str
        existing field used to filter features
    fieldout : str
        target integer field (created if absent)
    valin : str or int
        value of ``fieldin`` selecting the features to recode
    valout : int
        value written into ``fieldout``

    Raises
    ------
    Exception
        when ``fieldin`` does not exist in the shapefile
    """
    # open in update mode
    ds = ogr.Open(shapefile, 1)
    lyr = ds.GetLayer()

    # fields list — explicit membership check; the original's sys.exit(-1)
    # after the raise was unreachable
    fieldList = vf.getFields(lyr)
    if fieldin not in fieldList:
        raise Exception(
            "The field {} does not exist in the input shapefile".format(
                fieldin),
            "You must choose one of these existing fields : {}".format(
                ' / '.join(fieldList)))

    # Field type
    indfield = fieldList.index(fieldin)
    inLayerDefn = lyr.GetLayerDefn()
    fieldTypeCode = inLayerDefn.GetFieldDefn(indfield).GetType()
    fieldType = inLayerDefn.GetFieldDefn(indfield).GetFieldTypeName(
        fieldTypeCode)

    if fieldout.lower() in [x.lower() for x in fieldList]:
        print(
            "Field '{}' already exists. Existing value of {} field will be changed !!!"
            .format(fieldout, fieldout))
    else:
        try:
            new_field = ogr.FieldDefn(fieldout, ogr.OFTInteger)
            lyr.CreateField(new_field)
            print("Field '{}' created".format(fieldout))
        except Exception:
            print(("Error while creating field '{}'".format(fieldout)))
            sys.exit(-1)

    # The two original branches only differed by the filter quoting:
    # string fields need quotes around the value
    if fieldType != "String":
        lyr.SetAttributeFilter(fieldin + "=" + str(valin))
    else:
        lyr.SetAttributeFilter(fieldin + "=\'" + str(valin) + "\'")
    if lyr.GetFeatureCount() != 0:
        try:
            changeValueField(lyr, fieldout, valout)
            print("Field '{}' populated with {} value".format(
                fieldout, valout))
        except Exception:
            print("Error while populate field '{}'".format(fieldout))
            sys.exit(-1)
    else:
        print("The value '{}' does not exist for the field '{}'".format(
            valin, fieldin))
    ds.Destroy()
def check_region_shape(input_vector,
                       output_vector,
                       field,
                       epsg,
                       do_corrections,
                       display=False):
    """Check a region vector file and return the list of detected errors.

    Checks performed: projection against `epsg`, file naming convention,
    presence and string type of `field`, empty geometries, duplicated
    features, invalid/None geometries and too-small geometries
    (area < 0.1, in layer units).

    Parameters
    ----------
    input_vector : str
        Path of the vector file to check.
    output_vector : str
        Path of the corrected vector file to produce (used when corrections
        are applied; falls back to a 'valid_geom.shp' sibling otherwise).
    field : str
        Name of the label field that must exist and be of string type.
    epsg : int or str
        Expected EPSG code of the vector's projection.
    do_corrections : bool
        If True, intermediate cleaning steps write corrected outputs.
    display : bool
        Unused here; kept for interface compatibility.

    Returns
    -------
    list
        List of ServiceError instances (empty when the file is valid).
    """
    from iota2.Common import ServiceError
    from iota2.Common.FileUtils import cpShapeFile
    from iota2.VectorTools.vector_functions import getFields
    from iota2.VectorTools.vector_functions import getFieldType
    from iota2.VectorTools.vector_functions import get_vector_proj
    from iota2.VectorTools.vector_functions import checkEmptyGeom
    from iota2.VectorTools.vector_functions import checkValidGeom
    from iota2.VectorTools.DeleteDuplicateGeometriesSqlite import deleteDuplicateGeometriesSqlite
    area_threshold = 0.1
    input_vector_fields = getFields(input_vector)
    errors = []
    # check vector's projection
    vector_projection = get_vector_proj(input_vector)
    if not int(epsg) == int(vector_projection):
        error_msg = "{} projection ({}) incorrect".format(
            input_vector, vector_projection)
        errors.append(ServiceError.invalidProjection(error_msg))
    # check vector's name
    name_check = vector_name_check(input_vector)
    if name_check is False:
        error_msg = "{} file's name not correct, it must start with an ascii letter".format(
            input_vector)
        errors.append(ServiceError.namingConvention(error_msg))
    # check field
    if not field in input_vector_fields:
        errors.append(ServiceError.missingField(input_vector, field))
    # check field's type
    label_field_type = getFieldType(input_vector, field)
    if not label_field_type is str:
        errors.append(ServiceError.fieldType(input_vector, field, str))
    # geometries checks
    shape_no_empty_name = "no_empty.shp"
    shape_no_empty_dir = os.path.split(input_vector)[0]
    shape_no_empty = os.path.join(shape_no_empty_dir, shape_no_empty_name)
    shape_no_empty, empty_geom_number = checkEmptyGeom(
        input_vector,
        do_corrections=do_corrections,
        output_file=shape_no_empty)
    if empty_geom_number != 0:
        error_msg = "'{}' contains {} empty geometries".format(
            input_vector, empty_geom_number)
        errors.append(ServiceError.emptyGeometry(error_msg))
    # remove duplicates features
    shape_no_duplicates_name = "no_duplicates.shp"
    shape_no_duplicates_dir = os.path.split(input_vector)[0]
    shape_no_duplicates = os.path.join(shape_no_duplicates_dir,
                                       shape_no_duplicates_name)
    shape_no_duplicates, duplicated_features = deleteDuplicateGeometriesSqlite(
        shape_no_empty, do_corrections, shape_no_duplicates, quiet_mode=True)
    if duplicated_features != 0:
        error_msg = "'{}' contains {} duplicated features".format(
            input_vector, duplicated_features)
        errors.append(ServiceError.duplicatedFeatures(error_msg))
    # Check valid geometry
    shape_valid_geom_name = "valid_geom.shp"
    shape_valid_geom_dir = os.path.split(input_vector)[0]
    shape_valid_geom = os.path.join(shape_valid_geom_dir,
                                    shape_valid_geom_name)
    shape_valid_geom = output_vector if output_vector else shape_valid_geom
    # BUGFIX: original referenced the undefined name 'shape_no_multi' when
    # do_corrections was True (NameError). 'shape_no_duplicates' is the output
    # of the previous cleaning step in both cases.
    input_valid_geom_shape = shape_no_duplicates
    cpShapeFile(input_valid_geom_shape.replace(".shp", ""),
                shape_valid_geom.replace(".shp", ""),
                extensions=[".prj", ".shp", ".dbf", ".shx"])
    shape_valid_geom, invalid_geom, invalid_geom_corrected = checkValidGeom(
        shape_valid_geom, display=False)
    # remove features with None geometries
    none_geoms = remove_invalid_features(shape_valid_geom)
    invalid_geom += none_geoms
    if invalid_geom != 0:
        error_msg = "'{}' contains {} invalid geometries".format(
            input_vector, invalid_geom)
        errors.append(ServiceError.invalidGeometry(error_msg))
    nb_too_small_geoms = len(
        get_geometries_by_area(input_vector,
                               area=area_threshold,
                               driver_name="ESRI Shapefile"))
    if nb_too_small_geoms != 0:
        errors.append(
            ServiceError.tooSmallRegion(input_vector, area_threshold,
                                        nb_too_small_geoms))
    return errors
def DifferenceFiles(shp1, shp2):
    """Build a 'difference' copy of shp1: keep shp1 features (or their
    geometric difference with shp2 features) and finally delete any output
    feature still intersecting shp2.

    NOTE(review): relies on a module-level name 'distance' (search window
    half-size around each centroid) that is not defined in this function —
    confirm it is set at module scope before calling.

    Parameters
    ----------
    shp1 : str
        Path of the reference vector file.
    shp2 : str
        Path of the vector file to subtract from shp1.

    Returns
    -------
    str
        Path of the output 'difference' shapefile.
    """
    outShp = vf.copyShp(shp1, 'difference')
    fields = vf.getFields(shp1)
    ds1 = vf.openToRead(shp1)
    ds2 = vf.openToRead(shp2)
    lyr1 = ds1.GetLayer()
    lyr2 = ds2.GetLayer()
    layerDef = lyr1.GetLayerDefn()
    print(lyr2.GetFeatureCount())
    for f1 in lyr1:
        lyr2.SetAttributeFilter(None)
        geom1 = f1.GetGeometryRef()
        centroid = geom1.Centroid()
        x = centroid.GetX()
        y = centroid.GetY()
        # bounding box of the search window around the centroid
        minX = x - float(distance)
        minY = y - float(distance)
        maxX = x + float(distance)
        maxY = y + float(distance)
        lyr2.SetSpatialFilterRect(float(minX), float(minY), float(maxX),
                                  float(maxY))
        nbfeat2 = lyr2.GetFeatureCount()
        for i in range(0, nbfeat2):
            # reopen output each iteration so newly copied features are seen
            ds3 = vf.openToRead(outShp)
            lyr3 = ds3.GetLayer()
            lyr3.SetSpatialFilterRect(float(minX), float(minY), float(maxX),
                                      float(maxY))
            f2 = lyr2.GetFeature(i)
            print(str(f1.GetFID()) + " - " + str(i))
            geom2 = f2.GetGeometryRef()
            if geom1.Intersect(geom2):
                print("True")
                if geom1.Equal(geom2):
                    # BUGFIX: original tested the undefined name 'geom'
                    # (NameError); the feature's own geometry is geom1.
                    if not vf.VerifyGeom(geom1, lyr3):
                        vf.copyFeatInShp(f1, outShp)
                else:
                    # keep only the part of geom1 outside geom2
                    # (original also computed vf.Difference / a wkb round-trip
                    # whose results were immediately discarded — removed)
                    newgeom2 = geom1.Difference(geom2)
                    newFeature = ogr.Feature(layerDef)
                    newFeature.SetGeometry(newgeom2)
                    for field in fields:
                        newFeature.SetField(field, f1.GetField(field))
                    if not vf.VerifyGeom(newgeom2, lyr3):
                        vf.copyFeatInShp(newFeature, outShp)
                    newFeature.Destroy()
            else:
                print("False")
                if not vf.VerifyGeom(geom1, lyr3):
                    vf.copyFeatInShp(f1, outShp)
            f2.Destroy()
        f1.Destroy()
    # second pass: drop any output feature that still intersects shp2
    ds2 = vf.openToRead(shp2)
    lyr2 = ds2.GetLayer()
    ds3 = vf.openToWrite(outShp)
    lyr3 = ds3.GetLayer()
    for feat in lyr3:
        geom1 = feat.GetGeometryRef()
        centroid = geom1.Centroid()
        x = centroid.GetX()
        y = centroid.GetY()
        minX = x - float(distance)
        minY = y - float(distance)
        maxX = x + float(distance)
        maxY = y + float(distance)
        lyr2.SetSpatialFilterRect(float(minX), float(minY), float(maxX),
                                  float(maxY))
        nbfeat2 = lyr2.GetFeatureCount()
        for i in range(0, nbfeat2):
            f2 = lyr2.GetFeature(i)
            geom2 = f2.GetGeometryRef()
            if geom1.Intersect(geom2):
                lyr3.DeleteFeature(feat.GetFID())
    # physically remove deleted records from the shapefile
    ds3.ExecuteSQL('REPACK ' + lyr3.GetName())
    return outShp
def countByAtt(shpfile, field, storecsv="", val=None):
    """Count features per value of `field` and, for polygon layers, the total
    area and area rate of each class.

    Parameters
    ----------
    shpfile : str
        Path of the vector file to analyse.
    field : str
        Attribute field whose distinct values define the classes.
    storecsv : str
        Optional CSV output path; when provided, stats are written there
        instead of being printed (polygon case only).
    val : optional
        When given, restrict the count to this single class value.

    Returns
    -------
    list
        One entry per class: [class, count, area, rate] for polygon layers,
        [class, count] otherwise.
    """
    ds = vf.openToRead(shpfile)
    fields = vf.getFields(shpfile)
    layer = ds.GetLayer()
    # BUGFIX: 'classes' was used without being initialized (NameError, or
    # accumulation in a stale global across calls)
    classes = []
    if val is None:
        # collect the distinct values of 'field'
        for feature in layer:
            cl = feature.GetField(field)
            if cl not in classes:
                classes.append(cl)
    else:
        classes.append(val)
    layerDfn = layer.GetLayerDefn()
    fieldTypeCode = layerDfn.GetFieldDefn(fields.index(field)).GetType()
    classes.sort()
    layer.ResetReading()
    is_polygon = "POLYGON" in vf.getGeomTypeFromFeat(shpfile)
    totalarea = 0
    if is_polygon:
        # total layer area, used to compute per-class rates
        for feat in layer:
            geom = feat.GetGeometryRef()
            if geom:
                if not math.isnan(geom.GetArea()):
                    totalarea += geom.GetArea()
    stats = []
    for cl in classes:
        # string fields (OGR type code 4) need the value quoted in the filter
        if fieldTypeCode == 4:
            layer.SetAttributeFilter(field + " = \"" + str(cl) + "\"")
        else:
            layer.SetAttributeFilter(field + " = " + str(cl))
        featureCount = layer.GetFeatureCount()
        if is_polygon:
            area = 0
            for feat in layer:
                geom = feat.GetGeometryRef()
                if geom:
                    area += geom.GetArea()
            partcl = area / totalarea * 100
            if storecsv == "" or storecsv is None:
                print(
                    "Class # %s: %s features and a total area of %s (rate : %s)"
                    % (str(cl), str(featureCount), str(area),
                       str(round(partcl, 2))))
            stats.append([cl, featureCount, area, partcl])
        else:
            print("Class # %s: %s features" % (str(cl), str(featureCount)))
            stats.append([cl, featureCount])
        layer.ResetReading()
    if storecsv != "" and storecsv is not None:
        with open(storecsv, "w") as f:
            writer = csv.writer(f)
            writer.writerows(stats)
    return stats
def zonalstats(path,
               rasters,
               params,
               output,
               paramstats,
               classes="",
               bufferDist=None,
               nodata=0,
               gdalpath="",
               systemcall=False,
               gdalcachemax="9000",
               logger=LOGGER):
    """Compute zonal statistitics (descriptive and categorical) on multi-band
    raster or multi-rasters based on Point (buffered or not) or Polygon zonal
    vector.

    Does nothing (returns early) when `output` already exists.

    Parameters
    ----------
    path : string
        working directory
    rasters : list
        list of rasters to analyse
    params : list
        list of fid list and vector file
    output : vector file (sqlite, shapefile and geojson)
        vector file to store statistitics
    paramstats : list
        list of statistics to compute (e.g. {1:'stats', 2:'rate'})

        - paramstats = {1:"rate", 2:"statsmaj", 3:"statsmaj", 4:"stats", 2:stats_cl}
        - stats : mean_b, std_b, max_b, min_b
        - statsmaj : meanmaj, stdmaj, maxmaj, minmaj of majority class
        - rate : rate of each pixel value (classe names)
        - stats_cl : mean_cl, std_cl, max_cl, min_cl of one class
        - val : value of corresponding pixel (only for Point geometry and
          without other stats)
    classes : nomenclature file
        nomenclature
    bufferDist : int
        in case of point zonal vector : buffer size
    gdalpath : string
        path of gdal binaries (for system execution)
    systemcall : boolean
        if True, wrapped raster are stored in working dir
    gdalcachemax : string
        gdal cache for wrapping operation (in Mb)
    """
    LOGGER.info("Begin to compute zonal statistics for vector file %s" %
                (output))
    # gdalpath is mandatory when wrapping is delegated to system calls
    if systemcall and not gdalpath:
        LOGGER.info(
            "Please provide gdal binaries path when systemcall is set to true")
        sys.exit()
    # do not recompute an existing output
    if os.path.exists(output):
        return
    # Get bands or raster number
    if len(rasters) != 1:
        nbbands = len(rasters)
    else:
        nbbands = fut.getRasterNbands(rasters[0])
    # Prepare and check validity of statistics methods and input raster
    paramstats = checkmethodstats(rasters, paramstats, nbbands)
    # Get vector file and FID list
    if len(params) > 1:
        vector, idvals = params
    else:
        vector = params[0][0]
        idvals = params[0][1]
    # if no vector subsetting (all features)
    fullfid = getFidList(vector)
    if not idvals:
        idvals = fullfid
        novals = []
    else:
        # FIDs excluded from the subset (rows to blank in the output)
        novals = [x for x in fullfid if x not in idvals]
    # vector open and iterate features and/or buffer geom
    vectorname = os.path.splitext(os.path.basename(vector))[0]
    vectorgeomtype = vf.getGeomType(vector)
    vectorbuff = None
    # Prepare schema of output geopandas dataframe (geometry type and columns
    # formatting)
    schema = setPandasSchema(paramstats, vectorgeomtype, bufferDist)
    # Buffer Point vector file (geom type codes 1/4/1001/1004 are the OGR
    # Point / MultiPoint variants)
    if bufferDist and vectorgeomtype in (1, 4, 1001, 1004):
        vectorbuff = os.path.join(path, vectorname + "buff.shp")
        _ = bfo.bufferPoly(vector, vectorbuff, bufferDist=bufferDist)
        vectorgeomtype = vf.getGeomType(vectorbuff)
    # Store input vector in output geopandas dataframe
    vectgpad = gpad.read_file(vector)
    # Prepare statistics columns of output geopandas dataframe
    stats = definePandasDf(vectgpad, idvals, paramstats, classes)
    # Iterate FID list
    dataset = vf.openToRead(vector)
    lyr = dataset.GetLayer()
    if "fid" in [x.lower() for x in vf.getFields(vector)]:
        raise ValueError(
            "FID field not allowed. This field name is reserved by gdal binary."
        )
    for idval in idvals:
        if vectorgeomtype in (1, 4, 1001, 1004):
            if 'val' in list(paramstats.values()):
                # locate the point coordinates of the current feature
                # NOTE(review): xpt/ypt stay unbound if the feature has no
                # geometry; extractPixelValue below would then raise — confirm
                # inputs always carry geometries.
                lyr.SetAttributeFilter("FID=" + str(idval))
                for feat in lyr:
                    geom = feat.GetGeometryRef()
                    if geom:
                        if vectorgeomtype == 4:
                            point = geom.GetGeometryRef(0)
                            xpt = point.GetX()
                            ypt = point.GetY()
                        else:
                            xpt, ypt, _ = geom.GetPoint()
        # Switch to buffered vector (Point and bufferDist)
        if bufferDist:
            if vectorbuff:
                vector = vectorbuff
        # creation of wrapped rasters
        success, bands, err = extractRasterArray(rasters, paramstats, vector,
                                                 vectorgeomtype, idval,
                                                 gdalpath, gdalcachemax,
                                                 systemcall, path)
        if success:
            if 'val' in list(paramstats.values()):
                stats = extractPixelValue(rasters, bands, paramstats, xpt,
                                          ypt, stats, idval)
            else:
                stats = computeStats(bands, paramstats, stats, idval, nodata)
        else:
            print(
                "gdalwarp problem for feature %s (%s) : statistic computed with rasterio"
                % (idval, err))
    # Prepare columns name and format of output dataframe
    if "rate" in list(paramstats.values()) and classes != "":
        stats, schema = formatDataFrame(stats, schema, novals, True, classes)
    else:
        stats, schema = formatDataFrame(stats, schema, novals)
    # exportation
    dataframeExport(stats, output, schema)
    LOGGER.info("End to compute zonal statistics for vector file %s" %
                (output))