Example #1
def clipVectorfile(path,
                   vector,
                   clipfile,
                   clipfield="",
                   clipvalue="",
                   outpath="",
                   prefix="",
                   debulvl="info",
                   logger=logger):

    timeinit = time.time()

    if outpath == "":
        out = os.path.join(os.path.dirname(vector),
                           "%s_%s.shp" % (prefix, str(clipvalue)))
    else:
        out = os.path.join(outpath, "%s_%s.shp" % (prefix, str(clipvalue)))

    epsgin = vf.get_vector_proj(vector)
    if vf.get_vector_proj(clipfile) != epsgin:
        logger.error(
            "Land cover vector file and clip file projections are different please provide a clip file with same projection as Land cover file (EPSG = %s)"
            % (epsgin))
        sys.exit(-1)

    # clean geometries
    tmp = os.path.join(path, "tmp.shp")
    checkGeom.checkGeometryAreaThreshField(vector, 1, 0, tmp)

    for ext in [".shp", ".dbf", ".shx", ".prj"]:
        shutil.copy(
            os.path.splitext(tmp)[0] + ext,
            os.path.splitext(vector)[0] + ext)

    if not os.path.exists(out):
        if clipfile is not None:
            logger.info("Clip vector file %s with %s (%s == %s)" % (
                os.path.basename(vector),
                os.path.basename(clipfile),
                clipfield,
                clipvalue,
            ))
            print("Clip vector file %s with %s (%s == %s)" % (
                os.path.basename(vector),
                os.path.basename(clipfile),
                clipfield,
                clipvalue,
            ))

            # local environment
            localenv = os.path.join(path, "tmp%s" % (str(clipvalue)))

            if os.path.exists(localenv):
                shutil.rmtree(localenv)
            os.mkdir(localenv)

            for ext in [".shp", ".dbf", ".shx", ".prj"]:
                shutil.copy(os.path.splitext(clipfile)[0] + ext, localenv)

            clipfile = os.path.join(localenv, os.path.basename(clipfile))

            if vf.getNbFeat(clipfile) != 1:
                clip = os.path.join(localenv, "clip.shp")
                layer = vf.getFirstLayer(clipfile)
                fieldType = vf.getFieldType(os.path.join(localenv, clipfile),
                                            clipfield)

                if fieldType == str:
                    command = (
                        "ogr2ogr -sql \"SELECT * FROM %s WHERE %s = '%s'\" %s %s"
                        % (layer, clipfield, clipvalue, clip, clipfile))
                    Utils.run(command)
                elif fieldType == int or fieldType == float:
                    command = 'ogr2ogr -sql "SELECT * FROM %s WHERE %s = %s" %s %s' % (
                        layer,
                        clipfield,
                        clipvalue,
                        clip,
                        clipfile,
                    )

                    Utils.run(command)
                else:
                    raise Exception("Field type %s not handled" % (fieldType))
            else:
                clip = os.path.join(path, clipfile)
                logger.info(
                    "'%s' shapefile has only one feature which will used to clip data"
                    % (clip))

            # clip
            clipped = os.path.join(localenv, "clipped.shp")

            command = "ogr2ogr -select cat -clipsrc %s %s %s" % (clip, clipped,
                                                                 vector)

            Utils.run(command)

        else:
            # No clip file: build the local temporary directory here as well so
            # the clean-up steps below have a valid working directory.
            localenv = os.path.join(path, "tmp%s" % (str(clipvalue)))
            if not os.path.exists(localenv):
                os.mkdir(localenv)
            clipped = os.path.join(localenv, "merge.shp")

        timeclip = time.time()
        logger.info(" ".join([
            " : ".join(["Clip final shapefile",
                        str(timeclip - timeinit)]),
            "seconds",
        ]))

        # Delete duplicate geometries
        ddg.deleteDuplicateGeometriesSqlite(clipped)

        for ext in [".shp", ".shx", ".dbf", ".prj"]:
            shutil.copy(
                os.path.splitext(clipped)[0] + ext,
                os.path.join(localenv, "clean") + ext,
            )
            os.remove(os.path.splitext(clipped)[0] + ext)

        timedupli = time.time()
        logger.info(" ".join([
            " : ".join(
                ["Delete duplicated geometries",
                 str(timedupli - timeclip)]),
            "seconds",
        ]))

        # Check geom
        vf.checkValidGeom(os.path.join(localenv, "clean.shp"))

        # Add Field Area (hectare)
        afa.addFieldArea(os.path.join(localenv, "clean.shp"), 10000)

        for ext in [".shp", ".shx", ".dbf", ".prj"]:
            shutil.copy(os.path.join(localenv, "clean" + ext),
                        os.path.splitext(out)[0] + ext)

        shutil.rmtree(localenv)

        timeclean = time.time()
        logger.info(" ".join([
            " : ".join([
                "Clean empty geometries and compute areas (ha)",
                str(timeclean - timedupli),
            ]),
            "seconds",
        ]))

    else:

        logger.info("Output vector file '%s' already exists" % (out))
Example #2
def gestionTraitementsClasse(cfg, layer, outfile, classe, ss_classe, source,
                             res, area_thresh, pix_thresh, chp=None,
                             value=None, buff=None):

    if ss_classe not in cfg.parameters.maskLineaire:
        if chp is not None:
            FileByClass.FileByClass(layer, chp, value, outfile)

        if os.path.exists(outfile):

            manageFieldShapefile(outfile, cfg.Nomenclature[classe].Code,
                                 area_thresh)

            # Check geometry validity
            vf.checkValidGeom(outfile)

            # Remove duplicate geometries
            outfile_ssdb = DeleteDuplicateGeometries.DeleteDupGeom(outfile)

            # Handle line/polygon buffering or polygon erosion
            ds = vf.openToRead(outfile_ssdb)
            lyr = ds.GetLayer()
            typeGeom = lyr.GetGeomType()
            lyr = None
            if buff == "None": buff = None
            # wkbLineString/wkbMultiLineString/wkbMultiLineString25D/wkbLineString25D
            if (typeGeom in [2, 5, -2147483643, -2147483646]
                    and buff is not None):
                outfile_buffline = outfile_ssdb[:-4] + 'buffline' + str(
                    buff) + '.shp'
                BufferOgr.bufferPoly(outfile_ssdb, outfile_buffline, int(buff))
                outfile_buff = outfile_buffline
            # wkbPolygon/wkbMultiPolygon/wkbPolygon25D/wkbMultiPolygon25D
            elif (typeGeom in [3, 6, -2147483645, -2147483642]
                  and buff is not None):
                outfile_buffsurf_tmp = outfile_ssdb[:-4] + 'buffsurf' + str(
                    buff) + '_tmp.shp'
                outfile_buffsurf = outfile_ssdb[:-4] + 'buffsurf' + str(
                    buff) + '.shp'
                BufferOgr.bufferPoly(outfile_ssdb, outfile_buffsurf_tmp,
                                     int(buff))
                BufferOgr.bufferPoly(outfile_buffsurf_tmp, outfile_buffsurf,
                                     -int(buff))
                outfile_buff = outfile_buffsurf
            else:
                outfile_buff = outfile_ssdb[:-4] + 'buffinv' + str(
                    res) + '.shp'
                BufferOgr.bufferPoly(outfile_ssdb, outfile_buff, -int(res))

            # Split multipolygons into single polygons
            outfile_spoly = outfile_buff[:-4] + 'spoly' + '.shp'
            MultiPolyToPoly.multipoly2poly(outfile_buff, outfile_spoly)

            # Recompute areas
            AddFieldArea.addFieldArea(outfile_spoly, area_thresh)

            # Select polygons by area
            outfile_area = outfile_spoly[:-4] + 'sup' + str(
                pix_thresh) + 'pix.shp'
            SelectBySize.selectBySize(outfile_spoly, 'Area', pix_thresh,
                                      outfile_area)

            # Check geometry validity
            vf.checkValidGeom(outfile_area)

            return outfile_area

        else:
            print('No sample found for class {} in database {}'.format(
                classe, source))
            return None

    else:
        if chp is not None:
            FileByClass.FileByClass(layer, chp, value, outfile)

        # Handle line buffering
        ds = vf.openToRead(layer)
        lyr = ds.GetLayer()
        typeGeom = lyr.GetGeomType()
        lyr = None
        if buff == "None": buff = None
        if typeGeom in [2, 5, -2147483643, -2147483646] and buff is not None:
            outfile_buff_mask = outfile[:-4] + 'buffline' + str(
                buff) + '_mask.shp'
            BufferOgr.bufferPoly(outfile, outfile_buff_mask, int(buff))
            os.system("ls {}".format(outfile_buff_mask))
            return outfile_buff_mask
        else:
            print("la donnée n'est pas de type linéaire")
            return None
Example #3
def checkGeometryAreaThreshField(shapefile,
                                 pixelArea,
                                 pix_thresh,
                                 outshape="",
                                 outformat="ESRI shapefile"):

    tmpfile = []

    if outshape == "":
        outshape = shapefile

    if os.path.splitext(outshape)[1] == ".shp":
        outformat = "ESRI shapefile"
    elif os.path.splitext(outshape)[1] == ".sqlite":
        outformat = "SQlite"
    else:
        print("Output format not managed")
        sys.exit()

    # Empty geometry identification
    try:
        outShapefileGeom, _ = vf.checkEmptyGeom(shapefile, outformat)
        if shapefile != outshape:
            tmpfile.append(outShapefileGeom)

        print('Check empty geometries succeeded')

    except Exception as e:
        print('Check empty geometries failed with the following error:')
        print(e)

    # Remove duplicate geometries
    DeleteDuplicateGeometriesSqlite.deleteDuplicateGeometriesSqlite(
        outShapefileGeom)

    # Split multipolygons into single polygons
    shapefileNoDupspoly = outShapefileGeom[:-4] + 'spoly' + '.shp'
    tmpfile.append(shapefileNoDupspoly)
    try:
        MultiPolyToPoly.multipoly2poly(outShapefileGeom, shapefileNoDupspoly)
        print('Conversion of multipolygon shapefile to single polygons succeeded')
    except Exception as e:
        print('Conversion of multipolygon shapefile to single polygons failed '
              'with the following error:')
        print(e)

    # recompute areas
    try:
        AddFieldArea.addFieldArea(shapefileNoDupspoly, pixelArea)
    except Exception as e:
        print('Adding an Area field failed with the following error:')
        print(e)

    # Assign an ID field
    fieldList = vf.getFields(shapefileNoDupspoly)
    if 'ID' in fieldList:
        DeleteField.deleteField(shapefileNoDupspoly, 'ID')
    AddFieldID.addFieldID(shapefileNoDupspoly)

    if pix_thresh < 0:
        print("Area threshold has to be positive!")
        sys.exit()

    # Filter by Area
    try:
        SelectBySize.selectBySize(shapefileNoDupspoly, 'Area', pix_thresh,
                                  outshape)
        print('Selection by size above {} pixel(s) succeeded'.format(
            pix_thresh))
    except Exception as e:
        print('Selection by size failed with the following error:')
        print(e)

    # Check geometry
    vf.checkValidGeom(outshape, outformat)

    # Delete temporary files
    for fileDel in tmpfile:
        basefile = os.path.splitext(fileDel)[0]
        os.system('rm {}.*'.format(basefile))
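A short usage sketch, assuming checkGeometryAreaThreshField is in scope; the paths and threshold values are placeholders.

# Hypothetical call (placeholder paths): clean 'parcels.shp', recompute an
# 'Area' field with pixelArea=100 and keep features whose Area exceeds 2.
checkGeometryAreaThreshField("/data/parcels.shp",
                             100,
                             2,
                             outshape="/data/parcels_clean.shp")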
Example #4
def tilesRastersMergeVectSimp(path, tiles, out, grass, mmu, fieldclass,
                              clipfile, fieldclip, valueclip, tileId,
                              tileNamePrefix, tilesfolder, douglas, hermite,
                              angle):

    timeinit = time.time()

    print("Production of vector file %s" %
          (os.path.splitext(out)[0] + str(valueclip)))

    # local environment
    localenv = os.path.join(path, "tmp%s" % (str(valueclip)))
    if os.path.exists(localenv): shutil.rmtree(localenv)
    os.mkdir(localenv)

    # Find vector tiles concerned by the given zone
    listTilesFiles = getTilesFiles(clipfile, tiles, tilesfolder, tileId,
                                   tileNamePrefix, localenv, fieldclip,
                                   valueclip)

    # Merge rasters
    localListTilesFiles = []
    for tile in listTilesFiles:
        shutil.copy(tile, localenv)
        localListTilesFiles.append(
            os.path.join(localenv, os.path.basename(tile)))

    finalraster = mergeTileRaster(path, localListTilesFiles, fieldclip,
                                  valueclip, localenv)

    timemerge = time.time()
    print(" ".join(
        [" : ".join(["Merge Tiles", str(timemerge - timeinit)]), "seconds"]))

    # Raster vectorization and simplification
    outvect = os.path.join(localenv, finalraster[:-4] + '.shp')
    if os.path.exists(outvect): os.remove(outvect)
    vas.simplification(localenv, finalraster, grass, outvect, douglas, hermite,
                       mmu, angle)

    # Delete raster after vectorisation
    os.remove(finalraster)

    timevect = time.time()
    print(" ".join([
        " : ".join(
            ["Vectorisation and Simplification",
             str(timevect - timemerge)]), "seconds"
    ]))

    # Get clip shapefile layer
    if clipfile is not None:
        for ext in ['.shp', '.dbf', '.shx', '.prj']:
            shutil.copy(os.path.splitext(clipfile)[0] + ext, localenv)

        clipfile = os.path.join(localenv, os.path.basename(clipfile))

        if vf.getNbFeat(os.path.join(localenv, clipfile)) != 1:
            clip = os.path.join(localenv, "clip.shp")
            layer = vf.getFirstLayer(clipfile)
            fieldType = vf.getFieldType(os.path.join(localenv, clipfile),
                                        fieldclip)

            if fieldType == str:
                command = (
                    "ogr2ogr -sql \"SELECT * FROM %s WHERE %s = '%s'\" %s %s"
                    % (layer, fieldclip, valueclip, clip, clipfile))
                Utils.run(command)
            elif fieldType == int or fieldType == float:
                command = (
                    'ogr2ogr -sql "SELECT * FROM %s WHERE %s = %s" %s %s'
                    % (layer, fieldclip, valueclip, clip, clipfile))
                Utils.run(command)
            else:
                raise Exception('Field type %s not handled' % (fieldType))
        else:
            clip = os.path.join(localenv, clipfile)
            print(
                "'%s' shapefile has only one feature which will be used to clip data"
                % (clip))

        # clip
        clipped = os.path.join(localenv, "clipped.shp")
        command = "ogr2ogr -select cat -clipsrc %s %s %s"%(clip, \
                                                           clipped, \
                                                           outvect)
        Utils.run(command)

        for ext in ['.shp', '.dbf', '.shx', '.prj']:
            if os.path.exists(os.path.splitext(outvect)[0] + ext):
                os.remove(os.path.splitext(outvect)[0] + ext)
            if os.path.exists(os.path.splitext(clipfile)[0] + ext):
                os.remove(os.path.splitext(clipfile)[0] + ext)
            if os.path.exists(os.path.splitext(clip)[0] + ext):
                os.remove(os.path.splitext(clip)[0] + ext)

    else:
        clipped = os.path.join(localenv, "merge.shp")

    timeclip = time.time()
    print(" ".join([
        " : ".join(["Clip final shapefile",
                    str(timeclip - timevect)]), "seconds"
    ]))

    # Delete duplicate geometries
    ddg.deleteDuplicateGeometriesSqlite(clipped)

    for ext in [".shp", ".shx", ".dbf", ".prj"]:
        shutil.copy(
            os.path.splitext(clipped)[0] + ext,
            os.path.join(localenv, "clean") + ext)
        os.remove(os.path.splitext(clipped)[0] + ext)

    timedupli = time.time()
    print(" ".join([
        " : ".join(["Delete duplicated geometries",
                    str(timedupli - timeclip)]), "seconds"
    ]))

    # Input shapefile
    init_grass(path, grass)
    gscript.run_command("v.in.ogr",
                        flags="e",
                        input=os.path.join(localenv, "clean.shp"),
                        output="cleansnap",
                        snap="1e-07")

    # Rename column
    if fieldclass:
        gscript.run_command("v.db.renamecolumn",
                            map="cleansnap@datas",
                            column="cat_,%s" % (fieldclass))

    # Export shapefile
    outtmp = os.path.join(
        localenv,
        os.path.splitext(os.path.basename(out))[0] + str(valueclip) +
        os.path.splitext(os.path.basename(out))[1])

    if os.path.exists(outtmp): os.remove(outtmp)
    gscript.run_command("v.out.ogr",
                        flags="s",
                        input="cleansnap@datas",
                        output=outtmp,
                        format="ESRI_Shapefile")

    # Check geom
    vf.checkValidGeom(outtmp)

    # Add Field Area (hectare)
    afa.addFieldArea(outtmp, 10000)

    timeprodvect = time.time()
    print(" ".join([" : ".join(["Production of final shapefile geometry of %s"%(os.path.splitext(out)[0] + str(valueclip) + ext), \
                                str(timeprodvect - timeinit)]), "seconds"]))

    for ext in ['.shp', '.dbf', '.shx', '.prj']:
        shutil.copyfile(
            os.path.splitext(outtmp)[0] + ext,
            os.path.splitext(out)[0] + str(valueclip) + ext)
        if os.path.exists(os.path.splitext(outtmp)[0] + ext):
            os.remove(os.path.splitext(outtmp)[0] + ext)
        if os.path.exists(os.path.join(localenv, "clean%s" % (ext))):
            os.remove(os.path.join(localenv, "clean%s" % (ext)))
        if os.path.exists(os.path.join(localenv, "clipped%s" % (ext))):
            os.remove(os.path.join(localenv, "clipped%s" % (ext)))

    if os.path.exists(os.path.join(localenv, "grassdata")):
        shutil.rmtree(os.path.join(localenv, "grassdata"))
Example #5
def check_region_shape(input_vector,
                       output_vector,
                       field,
                       epsg,
                       do_corrections,
                       display=False):
    """
    """
    from iota2.Common import ServiceError
    from iota2.Common.FileUtils import cpShapeFile
    from iota2.VectorTools.vector_functions import getFields
    from iota2.VectorTools.vector_functions import getFieldType
    from iota2.VectorTools.vector_functions import get_vector_proj
    from iota2.VectorTools.vector_functions import checkEmptyGeom
    from iota2.VectorTools.vector_functions import checkValidGeom
    from iota2.VectorTools.DeleteDuplicateGeometriesSqlite import deleteDuplicateGeometriesSqlite

    area_threshold = 0.1
    input_vector_fields = getFields(input_vector)
    errors = []

    # check vector's projection
    vector_projection = get_vector_proj(input_vector)
    if int(epsg) != int(vector_projection):
        error_msg = "{} projection ({}) incorrect".format(
            input_vector, vector_projection)
        errors.append(ServiceError.invalidProjection(error_msg))

    # check vector's name
    name_check = vector_name_check(input_vector)
    if name_check is False:
        error_msg = "{} file's name not correct, it must start with an ascii letter".format(
            input_vector)
        errors.append(ServiceError.namingConvention(error_msg))

    # check field
    if field not in input_vector_fields:
        errors.append(ServiceError.missingField(input_vector, field))

    # check field's type
    label_field_type = getFieldType(input_vector, field)
    if label_field_type is not str:
        errors.append(ServiceError.fieldType(input_vector, field, str))

    # geometries checks
    shape_no_empty_name = "no_empty.shp"
    shape_no_empty_dir = os.path.split(input_vector)[0]
    shape_no_empty = os.path.join(shape_no_empty_dir, shape_no_empty_name)
    shape_no_empty, empty_geom_number = checkEmptyGeom(
        input_vector,
        do_corrections=do_corrections,
        output_file=shape_no_empty)
    if empty_geom_number != 0:
        error_msg = "'{}' contains {} empty geometries".format(
            input_vector, empty_geom_number)
        errors.append(ServiceError.emptyGeometry(error_msg))

    # remove duplicates features
    shape_no_duplicates_name = "no_duplicates.shp"
    shape_no_duplicates_dir = os.path.split(input_vector)[0]
    shape_no_duplicates = os.path.join(shape_no_duplicates_dir,
                                       shape_no_duplicates_name)

    shape_no_duplicates, duplicated_features = deleteDuplicateGeometriesSqlite(
        shape_no_empty, do_corrections, shape_no_duplicates, quiet_mode=True)
    if duplicated_features != 0:
        error_msg = "'{}' contains {} duplicated features".format(
            input_vector, duplicated_features)
        errors.append(ServiceError.duplicatedFeatures(error_msg))

    # Check valid geometry
    shape_valid_geom_name = "valid_geom.shp"
    shape_valid_geom_dir = os.path.split(input_vector)[0]
    shape_valid_geom = os.path.join(shape_valid_geom_dir,
                                    shape_valid_geom_name)
    shape_valid_geom = output_vector if output_vector else shape_valid_geom

    # No multipolygon split is performed in this function, so use the
    # de-duplicated shapefile whether or not corrections were applied.
    input_valid_geom_shape = shape_no_duplicates
    cpShapeFile(input_valid_geom_shape.replace(".shp", ""),
                shape_valid_geom.replace(".shp", ""),
                extensions=[".prj", ".shp", ".dbf", ".shx"])

    shape_valid_geom, invalid_geom, invalid_geom_corrected = checkValidGeom(
        shape_valid_geom, display=False)
    # remove features with None geometries
    none_geoms = remove_invalid_features(shape_valid_geom)
    invalid_geom += none_geoms

    if invalid_geom != 0:
        error_msg = "'{}' contains {} invalid geometries".format(
            input_vector, invalid_geom)
        errors.append(ServiceError.invalidGeometry(error_msg))

    nb_too_small_geoms = len(
        get_geometries_by_area(input_vector,
                               area=area_threshold,
                               driver_name="ESRI Shapefile"))
    if nb_too_small_geoms != 0:
        errors.append(
            ServiceError.tooSmallRegion(input_vector, area_threshold,
                                        nb_too_small_geoms))
    return errors
Example #6
def check_ground_truth(input_vector,
                       output_vector,
                       data_field,
                       epsg,
                       do_corrections,
                       display=False):
    """
    """
    import os
    from iota2.Common.FileUtils import removeShape
    from iota2.Common.FileUtils import cpShapeFile
    from iota2.VectorTools import checkGeometryAreaThreshField
    from iota2.VectorTools.vector_functions import getFields
    from iota2.VectorTools.vector_functions import getFieldType
    from iota2.VectorTools.vector_functions import checkEmptyGeom
    from iota2.VectorTools.vector_functions import get_vector_proj
    from iota2.VectorTools.vector_functions import checkValidGeom
    from iota2.VectorTools.DeleteDuplicateGeometriesSqlite import deleteDuplicateGeometriesSqlite
    from iota2.VectorTools.MultiPolyToPoly import multipoly2poly
    from iota2.Common import ServiceError
    tmp_files = []
    input_vector_fields = getFields(input_vector)

    errors = []
    # check vector's projection
    vector_projection = get_vector_proj(input_vector)
    if int(epsg) != int(vector_projection):
        error_msg = "{} projection ({}) incorrect".format(
            input_vector, vector_projection)
        errors.append(ServiceError.invalidProjection(error_msg))

    # check vector's name
    name_check = vector_name_check(input_vector)
    if name_check is False:
        error_msg = "file's name not correct, it must start with an ascii letter"
        errors.append(ServiceError.namingConvention(error_msg))

    # check field
    if data_field not in input_vector_fields:
        errors.append(ServiceError.missingField(input_vector, data_field))
    else:
        # check field's type
        label_field_type = getFieldType(input_vector, data_field)
        if label_field_type is not int:
            errors.append(ServiceError.fieldType(input_vector, data_field,
                                                 int))

    # geometries checks
    shape_no_empty_name = "no_empty.shp"
    shape_no_empty_dir = os.path.split(input_vector)[0]
    shape_no_empty = os.path.join(shape_no_empty_dir, shape_no_empty_name)
    shape_no_empty, empty_geom_number = checkEmptyGeom(
        input_vector,
        do_corrections=do_corrections,
        output_file=shape_no_empty)

    if empty_geom_number != 0:
        error_msg = "'{}' contains {} empty geometries".format(
            input_vector, empty_geom_number)
        if do_corrections:
            error_msg = "{} and they were removed".format(error_msg)
        errors.append(ServiceError.emptyGeometry(error_msg))
    tmp_files.append(shape_no_empty)

    # remove duplicates features
    shape_no_duplicates_name = "no_duplicates.shp"
    shape_no_duplicates_dir = os.path.split(input_vector)[0]
    shape_no_duplicates = os.path.join(shape_no_duplicates_dir,
                                       shape_no_duplicates_name)

    shape_no_duplicates, duplicated_features = deleteDuplicateGeometriesSqlite(
        shape_no_empty, do_corrections, shape_no_duplicates, quiet_mode=True)
    if duplicated_features != 0:
        error_msg = "'{}' contains {} duplicated features".format(
            input_vector, duplicated_features)
        if do_corrections:
            error_msg = "{} and they were removed".format(error_msg)
        errors.append(ServiceError.duplicatedFeatures(error_msg))
    tmp_files.append(shape_no_duplicates)

    # remove multipolygons
    shape_no_multi_name = "no_multi.shp"
    shape_no_multi_dir = os.path.split(input_vector)[0]
    shape_no_multi = os.path.join(shape_no_multi_dir, shape_no_multi_name)

    multipolygons_number = multipoly2poly(shape_no_duplicates, shape_no_multi,
                                          do_corrections)
    if multipolygons_number != 0:
        error_msg = "'{}' contains {} MULTIPOLYGON".format(
            input_vector, multipolygons_number)
        if do_corrections:
            error_msg = "{} and they were removed".format(error_msg)
        errors.append(ServiceError.containsMultipolygon(error_msg))
    tmp_files.append(shape_no_multi)

    # Check valid geometry
    shape_valid_geom_name = "valid_geom.shp"
    shape_valid_geom_dir = os.path.split(input_vector)[0]
    shape_valid_geom = os.path.join(shape_valid_geom_dir,
                                    shape_valid_geom_name)
    shape_valid_geom = output_vector if output_vector else shape_valid_geom

    input_valid_geom_shape = shape_no_multi if do_corrections else shape_no_duplicates
    cpShapeFile(input_valid_geom_shape.replace(".shp", ""),
                shape_valid_geom.replace(".shp", ""),
                extensions=[".prj", ".shp", ".dbf", ".shx"])

    shape_valid_geom, invalid_geom, invalid_geom_corrected = checkValidGeom(
        shape_valid_geom, display=False)
    # remove features with None geometries
    none_geoms = remove_invalid_features(shape_valid_geom)
    invalid_geom += none_geoms

    if invalid_geom != 0:
        error_msg = "'{}' contains {} invalid geometries".format(
            input_vector, invalid_geom)
        errors.append(ServiceError.invalidGeometry(error_msg))
    if output_vector is not None:
        tmp_files.append(shape_valid_geom)
    for tmp_file in tmp_files:
        if tmp_file is not input_vector and os.path.exists(tmp_file):
            removeShape(tmp_file.replace(".shp", ""),
                        [".prj", ".shp", ".dbf", ".shx"])
    if display:
        print("\n".join(errors))
    return errors
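A short usage sketch, assuming check_ground_truth is in scope; the paths, EPSG code and field name are placeholders.

# Validate a ground-truth shapefile against EPSG:2154 and an integer 'CODE'
# field; corrected geometries are written to the output vector (placeholder paths).
problems = check_ground_truth("/data/ground_truth.shp",
                              "/data/ground_truth_checked.shp",
                              data_field="CODE",
                              epsg=2154,
                              do_corrections=True,
                              display=False)
for problem in problems:
    print(problem)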
Example #7
def foret_non_foret(chemin,
                    FileInit,
                    FileOut,
                    convex=0.7,
                    compa=0.4,
                    elong=2.5):
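    """Split the initial forest mask FileInit into forest and non-forest
    polygons and merge both classes into FileOut; convex, compa and elong are
    the shape thresholds used to classify non-forest objects.
    """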

    os.chdir(chemin)
    print(os.path.join(chemin, FileInit))

    vf.checkValidGeom(FileInit)

    # Erode then dilate the initial file to keep only the (smoothed) forest outlines
    bo.bufferPoly(FileInit, 'tmp_Erosion20.shp', -20)
    bo.bufferPoly('tmp_Erosion20.shp', 'tmp_Dilatation20_poly.shp', 20)

    # Extra dilation to recover, through spatial selection, the objects that disappear after the opening
    bo.bufferPoly('tmp_Dilatation20_poly.shp', 'tmp_Extra_Dila_poly.shp', 20)

    mpp.multipoly2poly('tmp_Dilatation20_poly.shp', 'tmp_Dilatation20.shp')
    mpp.multipoly2poly('tmp_Extra_Dila_poly.shp', 'tmp_Extra_Dila.shp')

    # Forest / non-forest differentiation
    # Subtract the opening from the undifferentiated forest file
    # os.system('python DifferenceQGIS.py ' + FileInit + ' tmp_Dilatation20.shp True tmp_Non_foret_temp_poly.shp')
    sd.shapeDifference(FileInit, 'tmp_Dilatation20.shp',
                       'tmp_Non_foret_temp_poly.shp', False, None)
    # Convert multipolygons to single polygons
    mpp.multipoly2poly('tmp_Non_foret_temp_poly.shp', 'tmp_Non_Foret_temp.shp')

    # Remove residues of the difference to obtain the true forest outlines
    NF = vf.openToWrite('tmp_Non_Foret_temp.shp')
    ED = vf.openToRead('tmp_Extra_Dila.shp')
    NFLayer = NF.GetLayer()
    EDLayer = ED.GetLayer()
    for fil in EDLayer:
        gfil = fil.GetGeometryRef()
        for f in NFLayer:
            g = f.GetGeometryRef()
            fID = f.GetFID()
            if gfil.Contains(g):
                NFLayer.DeleteFeature(fID)
        NFLayer.ResetReading()
    NF.Destroy()
    ED.Destroy()
    NFLayer = None
    EDLayer = None

    # Subtract the non-forest from the initial file to obtain the true forest outlines
    sd.shapeDifference(FileInit, 'tmp_Non_Foret_temp.shp',
                       'tmp_Foret_temp_poly.shp', False, None)

    vf.checkValidGeom('tmp_Foret_temp_poly.shp')
    # Convert multipolygons to single polygons
    mpp.multipoly2poly('tmp_Foret_temp_poly.shp', 'tmp_Foret_temp.shp')

    # Sort forests: patches smaller than 5000 m2 are reclassified as non-forest
    F = vf.openToWrite('tmp_Foret_temp.shp')
    FLayer = F.GetLayer()
    for f in FLayer:
        g = f.GetGeometryRef()
        fID = f.GetFID()
        if g.GetArea() < 5000:
            FLayer.DeleteFeature(fID)
    F.Destroy()
    FLayer = None

    vf.checkValidGeom('tmp_Foret_temp.shp')

    # Subtract the forests from the original file to recover groves and thickets in the non-forest layer
    sd.shapeDifference(FileInit, 'tmp_Foret_temp.shp',
                       'tmp_Non_foret_full_poly.shp', False, None)

    vf.checkValidGeom('tmp_Non_foret_full_poly.shp')

    # Convert multipolygons to single polygons
    mpp.multipoly2poly('tmp_Non_foret_full_poly.shp', 'tmp_Non_Foret_full.shp')
    vf.checkValidGeom('tmp_Non_Foret_full.shp')

    # Non-forest refinement
    # Compute the discrimination fields: elongation, convexity and compactness
    addDiscriminationFields('tmp_Non_Foret_full.shp')
    # Add a class field that assigns each polygon a class from the discriminating fields
    addClassAHF('tmp_Non_Foret_full.shp', convex, compa, elong)

    # Post-processing
    # If a polygon classified as forest touches a larger forest, merge the two polygons into a single forest
    F = vf.openToRead('tmp_Foret_temp.shp')
    FL = F.GetLayer()
    NF = vf.openToWrite('tmp_Non_Foret_full.shp')
    NFL = NF.GetLayer()
    for foret in FL:
        gforet = foret.GetGeometryRef()
        for nonforet in NFL:
            fID = nonforet.GetFID()
            gnonforet = nonforet.GetGeometryRef()
            if gnonforet.Distance(gforet) == 0 and (
                    nonforet.GetField('ClassAHF') == 'Foret'
                    or nonforet.GetField('ClassAHF') == 'AutreAHF'):
                NFL.DeleteFeature(fID)
        NFL.ResetReading()
    F.Destroy()
    NF.Destroy()
    FL = None
    NFL = None

    vf.checkValidGeom('tmp_Non_Foret_full.shp')

    # Difference between the initial file and the new forests
    sd.shapeDifference(FileInit, 'tmp_Non_Foret_full.shp',
                       'tmp_Foret_full_poly.shp', False, None)
    vf.checkValidGeom('tmp_Foret_full_poly.shp')
    # Convert multipolygons to single polygons
    mpp.multipoly2poly('tmp_Foret_full_poly.shp', 'tmp_Foret_full.shp')

    # Add the class field for the forests
    F = vf.openToWrite('tmp_Foret_full.shp')
    FL = F.GetLayer()
    new_field = ogr.FieldDefn('ClassAHF', ogr.OFTString)
    FL.CreateField(new_field)
    for f in FL:
        f.SetField('ClassAHF', 'Foret')
        FL.SetFeature(f)
    F.Destroy()
    FL = None

    # Merge the forest file with the non-forest file
    mf.mergeVectors(['tmp_Foret_full.shp', 'tmp_Non_Foret_full.shp'], FileOut)

    # Remove intermediate files
    os.system('rm tmp_*')
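A short usage sketch, assuming foret_non_foret is in scope; the working directory and file names are placeholders.

# Run the forest / non-forest separation in a working directory that contains
# the initial forest mask; the shape thresholds keep their default values.
foret_non_foret("/data/work", "BDForet.shp", "Foret_NonForet.shp")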