Example #1
def shape_reference_vector(ref_vector: str, output_name: str) -> str:
    """
    modify the reference vector (add fields, rename, ...)

    Parameters
    ----------
    ref_vector : string
        path to the input reference shapefile
    output_name : string
        name of the output shapefile, without extension

    Returns
    -------
    string
        path to the modified shapefile
    """
    import os
    from iota2.Common.Utils import run
    from iota2.Common import FileUtils as fut
    from iota2.VectorTools.AddField import addField
    path, _ = os.path.split(ref_vector)

    tmp = os.path.join(path, output_name + "_TMP")
    fut.cpShapeFile(ref_vector.replace(".shp", ""), tmp,
                    [".prj", ".shp", ".dbf", ".shx"])
    addField(tmp + ".shp", "region", "1", str)
    addField(tmp + ".shp", "seed_0", "learn", str)
    cmd = (f"ogr2ogr -dialect 'SQLite' -sql 'select GEOMETRY,seed_0, "
           f"region, CODE as code from {output_name}_TMP' "
           f"{path}/{output_name}.shp {tmp}.shp")
    run(cmd)

    os.remove(tmp + ".shp")
    os.remove(tmp + ".shx")
    os.remove(tmp + ".prj")
    os.remove(tmp + ".dbf")
    return path + "/" + output_name + ".shp"
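A minimal usage sketch for the function above; the paths are hypothetical, and the input shapefile is assumed to contain a CODE field, since the embedded ogr2ogr command selects "CODE as code":

# hypothetical paths; the function writes <output_name>.shp next to ref_vector
# and returns the path of the new shapefile
new_ref = shape_reference_vector("/data/vectors/reference.shp",
                                 "reference_formatted")
print(new_ref)  # -> /data/vectors/reference_formatted.shp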
Example #2
def generate_shape_tile(tiles: List[str], pathWd: str, output_path: str,
                        proj: int) -> None:
    """generate tile's envelope with priority management

    Parameters
    ----------
    tiles : list
        list of tiles for which an envelope must be generated
    pathWd : str
        working directory
    output_path : str
        iota2 output directory
    proj : int
        EPSG code of the target projection
    """
    pathOut = os.path.join(output_path, "envelope")
    if not os.path.exists(pathOut):
        os.mkdir(pathOut)
    featuresPath = os.path.join(output_path, "features")

    cMaskName = "MaskCommunSL"
    for tile in tiles:
        if not os.path.exists(featuresPath + "/" + tile):
            os.mkdir(featuresPath + "/" + tile)
            os.mkdir(featuresPath + "/" + tile + "/tmp")
    commonDirectory = pathOut + "/commonMasks/"
    if not os.path.exists(commonDirectory):
        os.mkdir(commonDirectory)

    common = [
        featuresPath + "/" + Ctile + "/tmp/" + cMaskName + ".tif"
        for Ctile in tiles
    ]

    ObjListTile = [
        Tile(currentTile, name) for currentTile, name in zip(common, tiles)
    ]
    ObjListTile_sort = sorted(ObjListTile, key=priorityKey)

    tmpFile = pathOut + "/TMP"

    if pathWd:
        tmpFile = pathWd + "/TMP"
    if not os.path.exists(tmpFile):
        os.mkdir(tmpFile)
    genTileEnvPrio(ObjListTile_sort, pathOut, tmpFile, proj)
    AllPRIO = fu.FileSearch_AND(tmpFile, True, "_PRIO.shp")
    for prioTile in AllPRIO:
        tileName = prioTile.split("/")[-1].split("_")[0]
        fu.cpShapeFile(prioTile.replace(".shp", ""), pathOut + "/" + tileName,
                       [".prj", ".shp", ".dbf", ".shx"])

    shutil.rmtree(tmpFile)
    shutil.rmtree(commonDirectory)
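A hedged call sketch for generate_shape_tile; tile names, directories and the EPSG code are placeholders, and an existing iota2 output tree with a "features" sub-directory is assumed:

# placeholder values; pathWd=None makes the function create its TMP directory
# under output_path/envelope instead of a separate working directory
generate_shape_tile(tiles=["T31TCJ", "T31TDJ"],
                    pathWd=None,
                    output_path="/path/to/iota2_output",
                    proj=2154)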
Example #3
    def test_extract_maj_vote_samples(self):
        """
        test the extraction of samples by class according to a ratio
        """
        from iota2.Sampling.VectorFormatting import extract_maj_vote_samples
        from collections import Counter

        # define inputs
        in_vector_name = os.path.basename(self.in_vector)
        extracted_vector_name = "extracted_samples.sqlite"
        in_vector = os.path.join(self.test_working_directory, in_vector_name)
        extracted_vector = os.path.join(self.test_working_directory,
                                        extracted_vector_name)
        fut.cpShapeFile(self.in_vector.replace(".shp", ""),
                        in_vector.replace(".shp", ""),
                        [".prj", ".shp", ".dbf", ".shx"])

        # launch function
        dataField = "code"
        regionField = "region"
        extraction_ratio = 0.5
        extract_maj_vote_samples(in_vector, extracted_vector, extraction_ratio,
                                 dataField, regionField)
        # assert
        features_origin = fut.getFieldElement(self.in_vector,
                                              driverName="ESRI Shapefile",
                                              field=dataField,
                                              mode="all",
                                              elemType="str")
        by_class_origin = Counter(features_origin)

        features_in_vector = fut.getFieldElement(in_vector,
                                                 driverName="ESRI Shapefile",
                                                 field=dataField,
                                                 mode="all",
                                                 elemType="str")
        by_class_in_vector = Counter(features_in_vector)

        features_extract_vector = fut.getFieldElement(extracted_vector,
                                                      driverName="SQLite",
                                                      field=dataField,
                                                      mode="all",
                                                      elemType="str")
        by_class_extract_vector = Counter(features_extract_vector)

        buff = []
        for class_name, class_count in list(by_class_origin.items()):
            buff.append(by_class_in_vector[class_name] == extraction_ratio *
                        class_count)

        self.assertTrue(all(buff), msg="extraction of samples failed")
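fut.cpShapeFile is used throughout these examples to duplicate a shapefile together with its sidecar files, taking base paths without extension plus a list of extensions. A rough standard-library equivalent of that call pattern is sketched below; it is only an illustration, not iota2's actual implementation:

import shutil

def copy_shapefile(src_base, dst_base,
                   extensions=(".prj", ".shp", ".dbf", ".shx")):
    # copy each sidecar file of a shapefile, given base paths without extension
    for ext in extensions:
        shutil.copy(src_base + ext, dst_base + ext)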
Example #4
def erodeDiag(currentTile, NextTile, intersection, buff, TMP, proj):

    xo, yo = currentTile.getOrigin()  # origin of the highest-priority tile
    xn, yn = NextTile.getOrigin()
    Extent = getShapeExtent(intersection)  #[minX, maxX, minY, maxY]

    if yo > yn and xo > xn:
        minX = Extent[1] - buff
        maxX = Extent[1]
        minY = Extent[2]
        maxY = Extent[3]

        fu.removeShape(intersection.replace(".shp", ""),
                       [".prj", ".shp", ".dbf", ".shx"])
        pathFolder = "/".join(
            intersection.split("/")[0:len(intersection.split("/")) - 1])
        createShape(minX, minY, maxX, maxY, pathFolder,
                    intersection.split("/")[-1].replace(".shp", ""), proj)

        tmpName = NextTile.getName() + "_TMP"
        subtractShape(NextTile.getPriorityEnv(), intersection, TMP, tmpName,
                      proj)

        fu.removeShape(NextTile.getPriorityEnv().replace(".shp", ""),
                       [".prj", ".shp", ".dbf", ".shx"])
        fu.cpShapeFile(TMP + "/" + tmpName.replace(".shp", ""),
                       NextTile.getPriorityEnv().replace(".shp", ""),
                       [".prj", ".shp", ".dbf", ".shx"])
        fu.removeShape(TMP + "/" + tmpName.replace(".shp", ""),
                       [".prj", ".shp", ".dbf", ".shx"])

        tmpName = currentTile.getName() + "_TMP"
        subtractShape(currentTile.getPriorityEnv(), NextTile.getPriorityEnv(),
                      TMP, tmpName, proj)

        fu.removeShape(currentTile.getPriorityEnv().replace(".shp", ""),
                       [".prj", ".shp", ".dbf", ".shx"])
        fu.cpShapeFile(TMP + "/" + tmpName.replace(".shp", ""),
                       currentTile.getPriorityEnv().replace(".shp", ""),
                       [".prj", ".shp", ".dbf", ".shx"])
        fu.removeShape(TMP + "/" + tmpName.replace(".shp", ""),
                       [".prj", ".shp", ".dbf", ".shx"])

    if yo > yn and xo < xn:

        tmpName = NextTile.getName() + "_TMP"
        subtractShape(NextTile.getPriorityEnv(), currentTile.getPriorityEnv(),
                      TMP, tmpName, proj)

        fu.removeShape(NextTile.getPriorityEnv().replace(".shp", ""),
                       [".prj", ".shp", ".dbf", ".shx"])
        fu.cpShapeFile(TMP + "/" + tmpName.replace(".shp", ""),
                       NextTile.getPriorityEnv().replace(".shp", ""),
                       [".prj", ".shp", ".dbf", ".shx"])
        fu.removeShape(TMP + "/" + tmpName.replace(".shp", ""),
                       [".prj", ".shp", ".dbf", ".shx"])
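This example repeats the same remove / copy / clean-up sequence to replace a priority envelope in place. A small helper capturing that idiom could look like the sketch below; it only reuses the fu.removeShape and fu.cpShapeFile calls shown above and is not part of iota2 itself:

def replace_shapefile_in_place(tmp_base, target_base):
    # drop the old layer, promote the temporary result, then remove the temporary files
    exts = [".prj", ".shp", ".dbf", ".shx"]
    fu.removeShape(target_base, exts)
    fu.cpShapeFile(tmp_base, target_base, exts)
    fu.removeShape(tmp_base, exts)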
Example #5
    return newshp


if __name__ == "__main__":
    if len(sys.argv) == 1:
        prog = os.path.basename(sys.argv[0])
        print('      ' + sys.argv[0] + ' [options]')
        print("     Help : " + prog + " --help")
        print("        or : " + prog + " -h")
        sys.exit(-1)
    else:
        usage = "usage: %prog [options] "
        parser = argparse.ArgumentParser(
            description="Delete double geometries in a shapefile")
        parser.add_argument("-s",
                            dest="shapefile",
                            action="store",
                            help="Input shapefile",
                            required=True)
        parser.add_argument("-o",
                            dest="outpath",
                            action="store",
                            help="output path")
        args = parser.parse_args()
        print('Delete duplicate geometries...')
        newshp = DeleteDupGeom(args.shapefile)
        basenewshp = os.path.splitext(newshp)[0]
        if args.outpath:
            FileUtils.cpShapeFile(basenewshp, args.outpath,
                                  [".prj", ".shp", ".dbf", ".shx"], True)
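Assuming the snippet above is saved as a standalone script (its actual file name is not shown here), a typical invocation with placeholder paths would be:

    python delete_duplicate_geometries.py -s /data/parcels.shp -o /data/cleaned

The -o argument is optional; when it is given, the de-duplicated shapefile returned by DeleteDupGeom is copied to that directory with FileUtils.cpShapeFile.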
Example #6
def genTileEnvPrio(ObjListTile, out, tmpFile, proj):

    buff = 600  #offset in order to manage nodata in image's border

    ObjListTile.reverse()
    listSHP = [
        createRasterFootprint(c_ObjListTile.getPath(),
                              tmpFile + "/" + c_ObjListTile.getName() + ".shp")
        for c_ObjListTile in ObjListTile
    ]

    for env, currentTile in zip(listSHP, ObjListTile):
        currentTile.setEnvelope(env)
        currentTile.setPriorityEnv(env.replace(".shp", "_PRIO.shp"))
        fu.cpShapeFile(env.replace(".shp", ""),
                       env.replace(".shp", "") + "_PRIO",
                       [".prj", ".shp", ".dbf", ".shx"])

    for i in range(len(ObjListTile)):
        currentTileEnv = ObjListTile[i].getEnvelope()
        for j in range(1 + i, len(ObjListTile)):
            NextTileEnv = ObjListTile[j].getEnvelope()
            if IsIntersect(currentTileEnv, NextTileEnv):

                InterName = (ObjListTile[i].getName() + "_inter_" +
                             ObjListTile[j].getName())
                intersection = fu.ClipVectorData(ObjListTile[i].getEnvelope(),
                                                 ObjListTile[j].getEnvelope(),
                                                 tmpFile, InterName)
                notDiag = erodeInter(ObjListTile[i], ObjListTile[j],
                                     intersection, buff, proj)
                if notDiag:
                    tmpName = ObjListTile[i].getName() + "_TMP"
                    subtractShape(ObjListTile[i].getPriorityEnv(),
                                  intersection, tmpFile, tmpName, proj)

                    fu.removeShape(ObjListTile[i].getPriorityEnv().replace(".shp", ""),
                                   [".prj", ".shp", ".dbf", ".shx"])
                    fu.cpShapeFile(tmpFile + "/" + tmpName.replace(".shp", ""),
                                   ObjListTile[i].getPriorityEnv().replace(".shp", ""),
                                   [".prj", ".shp", ".dbf", ".shx"])
                    fu.removeShape(tmpFile + "/" + tmpName.replace(".shp", ""),
                                   [".prj", ".shp", ".dbf", ".shx"])

    ObjListTile.reverse()
    for i in range(len(ObjListTile)):
        currentTileEnv = ObjListTile[i].getEnvelope()
        for j in range(1 + i, len(ObjListTile)):
            NextTileEnv = ObjListTile[j].getEnvelope()
            if IsIntersect(currentTileEnv, NextTileEnv):
                if diag(ObjListTile[i], ObjListTile[j]):
                    InterName = (ObjListTile[i].getName() + "_inter_" +
                                 ObjListTile[j].getName())
                    intersection = fu.ClipVectorData(ObjListTile[i].getEnvelope(),
                                                     ObjListTile[j].getEnvelope(),
                                                     tmpFile, InterName)
                    erodeDiag(ObjListTile[i], ObjListTile[j], intersection,
                              buff, tmpFile, proj)
                else:
                    tmpName = ObjListTile[i].getName() + "_TMP"
                    subtractShape(ObjListTile[i].getPriorityEnv(),
                                  ObjListTile[j].getPriorityEnv(),
                                  tmpFile, tmpName, proj)

                    fu.removeShape(ObjListTile[i].getPriorityEnv().replace(".shp", ""),
                                   [".prj", ".shp", ".dbf", ".shx"])
                    fu.cpShapeFile(tmpFile + "/" + tmpName.replace(".shp", ""),
                                   ObjListTile[i].getPriorityEnv().replace(".shp", ""),
                                   [".prj", ".shp", ".dbf", ".shx"])
                    fu.removeShape(tmpFile + "/" + tmpName.replace(".shp", ""),
                                   [".prj", ".shp", ".dbf", ".shx"])
Example #7
    def test_VectorFormatting(self):
        """
        test the vector_formatting function

        a random function is used in Sampling.VectorFormatting.vector_formatting,
        so we can only check that the expected number of features is produced,
        with the expected fields and some feature values
        """
        from iota2.Sampling.VectorFormatting import vector_formatting
        from iota2.Common import ServiceConfigFile as SCF
        from iota2.Common import IOTA2Directory
        from iota2.Common.Utils import run
        from iota2.VectorTools.ChangeNameField import changeName

        # define inputs
        test_output = os.path.join(self.test_working_directory,
                                   "IOTA2_dir_VectorFormatting")
        # prepare ground truth
        ground_truth = os.path.join(self.test_working_directory,
                                    "groundTruth_test.shp")
        cmd = "ogr2ogr -s_srs EPSG:2154 -t_srs EPSG:2154 -dialect 'SQLite' -sql 'select GEOMETRY,code from t31tcj' {} {}".format(
            ground_truth, self.in_vector)
        run(cmd)

        # cfg instance
        runs = 2
        cfg = SCF.serviceConfigFile(self.config_test)
        cfg.setParam('chain', 'outputPath', test_output)
        cfg.setParam('chain', 'groundTruth', ground_truth)
        cfg.setParam('chain', 'dataField', "code")
        cfg.setParam('chain', 'cloud_threshold', 0)
        cfg.setParam('chain', 'merge_final_classifications', False)
        cfg.setParam('chain', 'runs', runs)
        cfg.setParam('GlobChain', 'proj', "EPSG:2154")
        cfg.setParam('chain', 'regionPath', self.ref_region)

        IOTA2Directory.generate_directories(test_output, check_inputs=False)

        # prepare expected function inputs
        t31tcj_feat_dir = os.path.join(self.test_working_directory,
                                       "IOTA2_dir_VectorFormatting",
                                       "features", "T31TCJ")
        os.mkdir(t31tcj_feat_dir)
        # prepare ref img
        t31tcj_ref_img = os.path.join(t31tcj_feat_dir, "MaskCommunSL.tif")
        shutil.copy(self.ref_img, t31tcj_ref_img)
        # prepare envelope
        envelope_name = "T31TCJ.shp"
        envelope_path = os.path.join(self.test_working_directory,
                                     "IOTA2_dir_VectorFormatting", "envelope",
                                     envelope_name)
        fut.cpShapeFile(self.ref_region.replace(".shp", ""),
                        envelope_path.replace(".shp", ""),
                        [".prj", ".shp", ".dbf", ".shx"])
        changeName(envelope_path, "region", "FID")
        # prepare cloud mask
        cloud_name = "CloudThreshold_0.shp"
        cloud_path = os.path.join(self.test_working_directory,
                                  "IOTA2_dir_VectorFormatting", "features",
                                  "T31TCJ", cloud_name)
        fut.cpShapeFile(self.ref_region.replace(".shp", ""),
                        cloud_path.replace(".shp", ""),
                        [".prj", ".shp", ".dbf", ".shx"])
        changeName(cloud_path, "region", "cloud")

        # launch function
        ratio = cfg.getParam('chain', 'ratio')
        random_seed = cfg.getParam('chain', 'random_seed')
        enable_cross_validation = cfg.getParam("chain",
                                               "enableCrossValidation")
        enable_split_ground_truth = cfg.getParam('chain', 'splitGroundTruth')
        fusion_merge_all_validation = cfg.getParam(
            'chain', 'fusionOfClassificationAllSamplesValidation')
        merge_final_classifications = cfg.getParam(
            'chain', 'merge_final_classifications')
        merge_final_classifications_ratio = cfg.getParam(
            'chain', 'merge_final_classifications_ratio')
        region_vec = cfg.getParam('chain', 'regionPath')
        epsg = int(cfg.getParam('GlobChain', 'proj').split(":")[-1])
        region_field = (cfg.getParam('chain', 'regionField'))
        vector_formatting("T31TCJ",
                          test_output,
                          ground_truth,
                          "code",
                          0,
                          ratio,
                          random_seed,
                          enable_cross_validation,
                          enable_split_ground_truth,
                          fusion_merge_all_validation,
                          runs,
                          epsg,
                          region_field,
                          merge_final_classifications,
                          merge_final_classifications_ratio,
                          region_vec,
                          working_directory=None)

        # assert
        nb_features_origin = len(
            fut.getFieldElement(ground_truth,
                                driverName="ESRI Shapefile",
                                field="code",
                                mode="all",
                                elemType="str"))

        test_vector = fut.FileSearch_AND(
            os.path.join(test_output, "formattingVectors"), True,
            "T31TCJ.shp")[0]
        nb_features_test = len(
            fut.getFieldElement(test_vector,
                                driverName="ESRI Shapefile",
                                field="code",
                                mode="all",
                                elemType="str"))
        # check nb features
        self.assertTrue(nb_features_origin == nb_features_test,
                        msg="wrong number of features")

        # check fields
        origin_fields = fut.get_all_fields_in_shape(ground_truth)
        test_fields = fut.get_all_fields_in_shape(test_vector)

        new_fields = ['region', 'originfid', 'seed_0', 'seed_1', 'tile_o']
        expected_fields = origin_fields + new_fields
        self.assertTrue(len(expected_fields) == len(test_fields))
        self.assertTrue(all(field in test_fields for field in expected_fields))
Example #8
def samples_merge(region_tiles_seed: Tuple[str, List[str], int],
                  output_path: str, region_field: str, runs: int,
                  enable_cross_validation: bool, ds_sar_opt_flag: bool,
                  working_directory: str) -> None:
    """
    for a given region and seed, extract samples from each tile,
    then merge them into a single new file
    """

    region, tiles, seed = region_tiles_seed

    formatting_vec_dir = os.path.join(output_path, "formattingVectors")
    samples_selection_dir = os.path.join(output_path, "samplesSelection")
    learn_val_dir = os.path.join(output_path, "dataAppVal")

    by_models_val = os.path.join(learn_val_dir, "bymodels")
    if not os.path.exists(by_models_val):
        try:
            os.mkdir(by_models_val)
        except OSError:
            pass
    wd_val = by_models_val
    work_dir = samples_selection_dir

    if working_directory:
        work_dir = working_directory
        wd_val = working_directory

    cross_validation_field = None
    if ds_sar_opt_flag and enable_cross_validation:
        cross_validation_field = "seed_{}".format(runs - 1)

    vector_region = []
    vector_region_val = []
    for tile in tiles:
        vector_tile = fut.FileSearch_AND(formatting_vec_dir, True, tile,
                                         ".shp")[0]
        poi_name = f"{tile}_region_{region}_seed_{seed}_samples.shp"
        poi_learn = os.path.join(work_dir, poi_name)
        poi_val = None
        # if SAR and Optical post-classification fusion extract validation
        # samples
        if ds_sar_opt_flag:
            poi_val_name = f"{tile}_region_{region}_seed_{seed}_samples_val.shp"
            poi_val = os.path.join(wd_val, poi_val_name)
            vector_region_val.append(poi_val)
        extract_poi(vector_tile, region, seed, region_field, poi_learn,
                    poi_val, cross_validation_field)
        vector_region.append(poi_learn)

    merged_poi_name = f"samples_region_{region}_seed_{seed}"
    merged_poi = fut.mergeVectors(merged_poi_name, work_dir, vector_region)

    for vector_r in vector_region:
        fut.removeShape(vector_r.replace(".shp", ""),
                        [".prj", ".shp", ".dbf", ".shx"])

    if working_directory:
        fut.cpShapeFile(merged_poi.replace(".shp", ""),
                        samples_selection_dir,
                        [".prj", ".shp", ".dbf", ".shx"],
                        spe=True)
        if ds_sar_opt_flag:
            for vector_validation in vector_region_val:
                if os.path.exists(vector_validation):
                    fut.cpShapeFile(vector_validation.replace(".shp", ""),
                                    by_models_val,
                                    [".prj", ".shp", ".dbf", ".shx"],
                                    spe=True)
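A call sketch for samples_merge, assuming region_tiles_seed is the (region, list of tiles, seed) tuple that the function unpacks; every value below is a placeholder:

samples_merge(("1", ["T31TCJ", "T31TDJ"], 0),   # (region, tiles, seed)
              output_path="/path/to/iota2_output",
              region_field="region",
              runs=2,
              enable_cross_validation=False,
              ds_sar_opt_flag=False,
              working_directory=None)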
Example #9
def vector_formatting(
        tile_name: str,
        output_path: str,
        ground_truth_vec: str,
        data_field: str,
        cloud_threshold: float,
        ratio: float,
        random_seed: int,
        enable_cross_validation: bool,
        enable_split_ground_truth: bool,
        fusion_merge_all_validation: bool,
        runs: int,
        epsg: int,
        region_field: str,
        merge_final_classifications: Optional[bool] = False,
        merge_final_classifications_ratio: Optional[float] = None,
        region_vec: Optional[str] = None,
        working_directory=None,
        logger=LOGGER) -> None:
    """
    format the reference database for a given tile: intersect it with the
    tile's envelope, the region layer and the cloud mask, then split the
    result into learning and validation sample sets.

    Parameters
    ----------
    tile_name: str
        tile name
    output_path: str
        iota2 output path
    ground_truth_vec: str
        path to the ground truth database
    data_field: str
        field of the database containing class labels
    cloud_threshold: float
        cloud threshold to pick up polygons in database
    ratio: float
        ratio between the learning and validation sample sets
    random_seed: int
        initialize the random seed
    enable_cross_validation: bool
        is iota2 cross validation enabled? TODO: remove
        this parameter
    enable_split_ground_truth: bool
        flag to split the input database into learning and validation
        sample sets
    fusion_merge_all_validation: bool
        flag to merge all classifications
    runs: int
        number of random learning/validation sample sets
    epsg: int
        epsg code
    region_field: str
        region field in region database
    merge_final_classifications: bool
        indicates whether final classifications will be merged
    merge_final_classifications_ratio : float
        ratio of samples to extract by tile and by region
        in order to compute confusion matrix on classification fusion
    region_vec: str
        region database
    working_directory : str
        path to a working directory
    logger: logging
        root logger
    """
    from iota2.Common import FileUtils as fut
    from iota2.VectorTools import spatialOperations as intersect
    from iota2.VectorTools import ChangeNameField
    from iota2.Sampling import SplitInSubSets as subset
    from iota2.VectorTools.AddField import addField
    # const
    tile_field = "tile_o"
    formatting_directory = os.path.join(output_path, "formattingVectors")
    if working_directory:
        formatting_directory = working_directory
    output_name = tile_name + ".shp"

    output = os.path.join(formatting_directory, output_name)
    features_directory = os.path.join(output_path, "features")
    cloud_vec = os.path.join(features_directory, tile_name,
                             f"CloudThreshold_{cloud_threshold}.shp")
    tile_env_vec = os.path.join(output_path, "envelope", f"{tile_name}.shp")

    region_field = region_field.lower()
    split_directory = os.path.join(output_path, "dataAppVal")
    final_directory = os.path.join(output_path, "final")

    if not region_vec:
        region_vec = os.path.join(output_path, "MyRegion.shp")
    if merge_final_classifications:
        wd_maj_vote = os.path.join(final_directory,
                                   "merge_final_classifications")
        if working_directory:
            wd_maj_vote = working_directory

    output_driver = "SQLite"
    if os.path.splitext(os.path.basename(output))[-1] == ".shp":
        output_driver = "ESRI Shapefile"

    work_dir = os.path.join(output_path, "formattingVectors")
    if working_directory:
        work_dir = working_directory

    work_dir = os.path.join(work_dir, tile_name)
    try:
        os.mkdir(work_dir)
    except OSError:
        logger.warning(f"{work_dir} already exists")

    # log
    logger.info(f"formatting vector for tile : {tile_name}")
    logger.debug(f"output : {output}")
    logger.debug(f"groundTruth : {ground_truth_vec}")
    logger.debug(f"cloud : {cloud_vec}")
    logger.debug(f"region : {region_vec}")
    logger.debug(f"tile envelope : {tile_env_vec}")
    logger.debug(f"data field : {data_field}")
    logger.debug(f"region field : {region_field}")
    logger.debug(f"ratio : {ratio}")
    logger.debug(f"seeds : {runs}")
    logger.debug(f"epsg : {epsg}")
    logger.debug(f"workingDirectory : {work_dir}")

    img_ref = fut.FileSearch_AND(os.path.join(features_directory, tile_name),
                                 True, ".tif")[0]

    logger.info("launch intersection between tile's envelope and regions")
    tile_region = os.path.join(work_dir, "tileRegion_" + tile_name + ".sqlite")
    region_tile_intersection = intersect.intersectSqlites(tile_env_vec,
                                                          region_vec,
                                                          work_dir,
                                                          tile_region,
                                                          epsg,
                                                          "intersection",
                                                          [region_field],
                                                          vectformat='SQLite')
    if not region_tile_intersection:
        error_msg = (
            f"there is no intersection between the tile '{tile_name}' "
            f"and the region shape '{region_vec}'")
        logger.critical(error_msg)
        raise Exception(error_msg)

    region_vector_name = os.path.splitext(os.path.basename(region_vec))[0]
    create_tile_region_masks(tile_region, region_field, tile_name,
                             os.path.join(output_path, "shapeRegion"),
                             region_vector_name, img_ref)

    logger.info(
        "launch intersection between tile's envelopeRegion and groundTruth")
    tile_region_ground_truth = os.path.join(
        work_dir, "tileRegionGroundTruth_" + tile_name + ".sqlite")

    if intersect.intersectSqlites(tile_region,
                                  ground_truth_vec,
                                  work_dir,
                                  tile_region_ground_truth,
                                  epsg,
                                  "intersection",
                                  [data_field, region_field, "ogc_fid"],
                                  vectformat='SQLite') is False:
        warning_msg = (
            f"there is no intersection between the tile "
            f"'{tile_name}' and the ground truth '{ground_truth_vec}'")
        logger.warning(warning_msg)
        return None

    logger.info("remove unusable samples")

    intersect.intersectSqlites(tile_region_ground_truth,
                               cloud_vec,
                               work_dir,
                               output,
                               epsg,
                               "intersection",
                               [data_field, region_field, "t2_ogc_fid"],
                               vectformat='SQLite')

    os.remove(tile_region)
    os.remove(tile_region_ground_truth)

    # rename field t2_ogc_fid to originfid, which corresponds
    # to the polygon number
    ChangeNameField.changeName(output, "t2_ogc_fid", "originfid")

    if merge_final_classifications and fusion_merge_all_validation is False:
        maj_vote_sample_tile_name = "{}_majvote.sqlite".format(tile_name)
        maj_vote_sample_tile = os.path.join(wd_maj_vote,
                                            maj_vote_sample_tile_name)
        if enable_cross_validation is False:
            extract_maj_vote_samples(output,
                                     maj_vote_sample_tile,
                                     merge_final_classifications_ratio,
                                     data_field,
                                     region_field,
                                     driver_name="ESRI Shapefile")

    logger.info(f"split {output} in {runs} subsets with the ratio {ratio}")
    subset.splitInSubSets(output,
                          data_field,
                          region_field,
                          ratio,
                          runs,
                          output_driver,
                          crossValidation=enable_cross_validation,
                          splitGroundTruth=enable_split_ground_truth,
                          random_seed=random_seed)

    addField(output,
             tile_field,
             tile_name,
             valueType=str,
             driver_name=output_driver)

    split_dir = split_directory
    if working_directory:
        split_dir = work_dir

    # splits by learning and validation sets (use in validations steps)
    output_splits = split_by_sets(output,
                                  runs,
                                  split_dir,
                                  epsg,
                                  epsg,
                                  tile_name,
                                  cross_valid=enable_cross_validation,
                                  split_ground_truth=enable_split_ground_truth)
    if working_directory:
        if output_driver == "SQLite":
            shutil.copy(output, os.path.join(output_path, "formattingVectors"))
            os.remove(output)

        elif output_driver == "ESRI Shapefile":
            fut.cpShapeFile(output.replace(".shp", ""),
                            os.path.join(output_path, "formattingVectors"),
                            [".prj", ".shp", ".dbf", ".shx"], True)
            fut.removeShape(output.replace(".shp", ""),
                            [".prj", ".shp", ".dbf", ".shx"])

        for current_split in output_splits:
            shutil.copy(current_split, os.path.join(output_path, "dataAppVal"))
            os.remove(current_split)

        if (merge_final_classifications and enable_cross_validation is False
                and fusion_merge_all_validation is False):
            shutil.copy(
                maj_vote_sample_tile,
                os.path.join(final_directory, "merge_final_classifications"))
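A minimal call sketch for vector_formatting with placeholder values; in a real iota2 run these arguments come from the configuration file, as in the test shown in Example #7:

vector_formatting(tile_name="T31TCJ",
                  output_path="/path/to/iota2_output",
                  ground_truth_vec="/path/to/groundTruth.shp",
                  data_field="code",
                  cloud_threshold=0,
                  ratio=0.5,
                  random_seed=None,
                  enable_cross_validation=False,
                  enable_split_ground_truth=True,
                  fusion_merge_all_validation=False,
                  runs=2,
                  epsg=2154,
                  region_field="region",
                  region_vec="/path/to/regions.shp",
                  working_directory=None)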