Example #1
def getAllDatasByDate(currentTile, rasterPatterns, masksPatterns, arboRaster,
                      arboMask, TileFolder, rasterInitValue, masksInitValues):
    # get all rasters in the current tile and store each with its init value
    buf = []
    r = []
    for currentPattern in rasterPatterns:
        r += fu.fileSearchRegEx(TileFolder + currentTile + arboRaster +
                                currentPattern)
    for currentR in r:
        buf.append((currentR, rasterInitValue))

    for currentPattern, currentMaskInit in zip(masksPatterns, masksInitValues):
        m = []
        m += fu.fileSearchRegEx(TileFolder + currentTile + arboMask +
                                currentPattern)
        for currentM in m:
            buf.append((currentM, currentMaskInit))

    # sort it by date
    buff = [(getDateFromRaster(currentRaster[0]), currentRaster)
            for currentRaster in buf]
    buff = fu.sortByFirstElem(buff)
    allDates = []
    allRasters = []
    for date, rasters in buff:
        allDates.append(date)
        allRasters.append(rasters)
    return allRasters, allDates
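
All of these examples group (key, value) pairs with fu.sortByFirstElem and then consume the result as (key, [values]) tuples. The helper itself is not shown on this page; judging from the documented output formats (e.g. [(RegionNumber,[tile1,tile2,...]),(...),...] in Example #3) and from the "sort it by date" usage above, a minimal sketch of the assumed behavior could be:

def sortByFirstElem_sketch(pairs):
    # Assumption: group (key, value) pairs into (key, [values]) tuples,
    # ordered by key. The real fu.sortByFirstElem may order its output
    # or handle duplicates differently.
    grouped = []
    keys = []
    for key, value in pairs:
        if key not in keys:
            keys.append(key)
            grouped.append((key, [value]))
        else:
            grouped[keys.index(key)][1].append(value)
    return sorted(grouped)

# sortByFirstElem_sketch([("D1", "r1"), ("D2", "r2"), ("D1", "r3")])
# -> [("D1", ["r1", "r3"]), ("D2", ["r2"])]
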
Example #2
def getIntersections(outGridPath, inGridPath, tileField_first,
                     tileField_second):
    """
    OUT
    AllIntersections [list of tuple] : [(S2Tile,[L8Tile,L8Tile,L8Tile]),(...),...]
    """
    driver = ogr.GetDriverByName("ESRI Shapefile")
    dataOut = driver.Open(outGridPath, 0)
    dataIn = driver.Open(inGridPath, 0)

    layerOut = dataOut.GetLayer()
    layerIn = dataIn.GetLayer()

    AllIntersections = []  # Ex : [[S2,[L8,L8,...,L8]],[],...]

    outTiles = [(outTile.GetGeometryRef().Clone(),
                 outTile.GetField(tileField_first)) for outTile in layerOut]
    inTiles = [(inTile.GetGeometryRef().Clone(),
                inTile.GetField(tileField_second)) for inTile in layerIn]

    for outTileGeom, outTile in outTiles:
        for inTileGeom, inTile in inTiles:
            intersection = outTileGeom.Intersection(inTileGeom)
            if intersection.GetArea() != 0.0 and (
                    outTile, inTile) not in AllIntersections:
                AllIntersections.append((outTile, inTile))
    return fu.sortByFirstElem(AllIntersections)
Example #3
def getModel(pathShapes):
    sort = []
    pathAppVal = fu.FileSearch_AND(pathShapes, True, "seed0", ".shp", "learn")
    for path in pathAppVal:
        try:
            # check whether this (region, tile) pair is already registered
            sort.index((path.split("/")[-1].split("_")[-3],
                        path.split("/")[-1].split("_")[0]))
        except ValueError:
            sort.append((path.split("/")[-1].split("_")[-3],
                         path.split("/")[-1].split("_")[0]))
    return fu.sortByFirstElem(
        sort)  # [(RegionNumber,[tile1,tile2,...]),(...),...]
Example #4
def confFusion(shapeIn, dataField, csv_out, txt_out, csvPath, pathConf):
    f = file(pathConf)
    cfg = Config(f)

    N = int(cfg.chain.runs)
    cropMix = cfg.argTrain.cropMix
    annualCrop = cfg.argTrain.annualCrop
    labelReplacement, labelName = cfg.argTrain.ACropLabelReplacement
    labelReplacement = int(labelReplacement)

    for seed in range(N):
        # Find all the possible classes
        AllClass = fu.getFieldElement(shapeIn, "ESRI Shapefile", dataField,
                                      "unique")
        AllClass = sorted(AllClass)
        # Initialize the final matrix
        AllConf = fu.FileSearch_AND(csvPath, True,
                                    "seed_" + str(seed) + ".csv")
        csv = fu.confCoordinatesCSV(AllConf)
        csv_f = fu.sortByFirstElem(csv)

        confMat = fu.gen_confusionMatrix(csv_f, AllClass)
        if cropMix == 'True':
            writeCSV(confMat, AllClass,
                     csv_out + "/MatrixBeforeClassMerge_" + str(seed) + ".csv")
            confMat, AllClass = replaceAnnualCropInConfMat(
                confMat, AllClass, annualCrop, labelReplacement)
            writeCSV(confMat, AllClass,
                     csv_out + "/Classif_Seed_" + str(seed) + ".csv")
        else:
            writeCSV(confMat, AllClass,
                     csv_out + "/Classif_Seed_" + str(seed) + ".csv")

        nbrGood = confMat.trace()
        nbrSample = confMat.sum()

        overallAccuracy = float(nbrGood) / float(nbrSample)
        kappa = computeKappa(confMat)
        Pre = computePreByClass(confMat, AllClass)
        Rec = computeRecByClass(confMat, AllClass)
        Fs = computeFsByClass(Pre, Rec, AllClass)

        writeResults(
            Fs, Rec, Pre, kappa, overallAccuracy, AllClass,
            txt_out + "/ClassificationResults_seed_" + str(seed) + ".txt")
Example #5
def getNbSample(shape, tile, dataField, valToFind, resol, region):
    driver = ogr.GetDriverByName("ESRI Shapefile")
    buff = []
    dataSource = driver.Open(shape, 0)
    layer = dataSource.GetLayer()
    for feature in layer:
        if str(feature.GetField(dataField)) in valToFind:
            geom = feature.GetGeometryRef()
            buff.append((feature.GetField(dataField), geom.GetArea()))
    rep = fu.sortByFirstElem(buff)
    repDict = {}
    for currentClass, currentAreas in rep:
        array = np.asarray(currentAreas)
        totalArea = np.sum(array)
        repDict[currentClass] = int(totalArea / (int(resol) * int(resol)))
    print repDict
    return repDict
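
The dictionary returned above maps each class label to an approximate pixel count: the summed polygon area divided by the pixel area. That arithmetic can be checked in isolation; a minimal sketch with made-up areas and a made-up 20 m resolution:

import numpy as np

resol = 20  # pixel size in meters (illustrative value)
# class label -> polygon areas in square meters (illustrative values)
rep = [("11", [40000.0, 12000.0]), ("12", [9000.0])]

repDict = {}
for currentClass, currentAreas in rep:
    totalArea = np.sum(np.asarray(currentAreas))
    repDict[currentClass] = int(totalArea / (int(resol) * int(resol)))

# repDict == {"11": 130, "12": 22}
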
Example #6
def getAreaByRegion(allShape):
    """
    IN :
        allShape [list] : list of paths to ground truth shapefiles
    OUT :
        allArea [list] : list of ground truth areas by region, in square meters
    """
    shapeSort = []
    for shape in allShape:
        region = shape.split("_")[-4]
        shapeSort.append([region, shape])
    shapeSort = fu.sortByFirstElem(shapeSort)
    allArea = []
    for region, shapesRegion in shapeSort:
        area = 0
        for shapeF in shapesRegion:
            area += rs.getShapeSurface(shapeF)
        allArea.append([region, area])
    return allArea
Example #7
def SplitShape(shapeIN, dataField, folds, outPath, outName):
    """
    this function splits a shapefile into "folds" new shapefiles.
    IN :
        shapeIN [string] : path to the shapefile to split
        dataField [string] : data field in the shapefile
        folds [int] : number of splits
        outPath [string] : path where the new shapefiles are stored
        outName [string] : name pattern for the new shapefiles
    OUT :
        "folds" new shapefiles
    """
    AllFields = fu.getAllFieldsInShape(shapeIN, "ESRI Shapefile")
    driver = ogr.GetDriverByName("ESRI Shapefile")
    dataSource = driver.Open(shapeIN, 0)
    layer = dataSource.GetLayer()
    buff = []
    for feature in layer:
        FID = feature.GetFID()
        cl = feature.GetField(dataField)
        buff.append([cl, FID])

    buff = fu.sortByFirstElem(buff)
    cl_fold = []
    for cl, FID_cl in buff:
        fold = splitList(FID_cl, folds)
        cl_fold.append([cl, fold])

    id_fold = []
    for i in range(len(cl_fold)):
        foldNumber = 1
        for currentFold in cl_fold[i][1]:
            for FID in currentFold:
                id_fold.append([foldNumber, FID])
            foldNumber += 1

    id_fold = fu.sortByFirstElem(
        id_fold)  # [[foldNumber,[allClassFID]],[],...]
    shapeCreated = []
    for foldNumber, AllFID in id_fold:
        listFid = ["FID=" + str(fid) for fid in AllFID]
        chA = " OR ".join(listFid)
        layer.SetAttributeFilter(chA)

        origin_name = outName.split("_")
        origin_name[2] = origin_name[2] + "f" + str(foldNumber)
        # origin_name.insert(3,"f"+str(foldNumber))
        nameOut = "_".join(origin_name)

        outShapefile = outPath + "/" + nameOut
        print outShapefile
        fu.CreateNewLayer(layer, outShapefile, AllFields)
        shapeCreated.append(outShapefile)
    return shapeCreated
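
SplitShape delegates the actual fold assignment to a splitList helper that is not shown on this page. From the way its result is consumed (one sub-list of FIDs per fold), it presumably cuts a list into "folds" contiguous chunks of near-equal size; a minimal sketch of that assumed behavior:

def splitList_sketch(inList, nbSplit):
    # Assumption: split inList into nbSplit contiguous chunks whose sizes
    # differ by at most one. The real splitList may distribute the
    # remainder differently (e.g. round-robin).
    size, rest = divmod(len(inList), nbSplit)
    out = []
    start = 0
    for i in range(nbSplit):
        end = start + size + (1 if i < rest else 0)
        out.append(inList[start:end])
        start = end
    return out

# splitList_sketch([10, 11, 12, 13, 14, 15, 16], 3)
# -> [[10, 11, 12], [13, 14], [15, 16]]
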
Example #8
def launchTraining(pathShapes, pathConf, pathToTiles, dataField, stat, N,
                   pathToCmdTrain, out, pathWd, pathlog):
    """
    OUT : the training commands for the application
    """
    cmd_out = []

    f = file(pathConf)
    cfg = Config(f)
    classif = cfg.argTrain.classifier
    options = cfg.argTrain.options
    outputPath = cfg.chain.outputPath
    samplesMode = cfg.argTrain.shapeMode
    dataField = cfg.chain.dataField
    binding = cfg.GlobChain.bindingPython

    posModel = -3  # position of the model name when the training shape name is split on "_"

    Stack_ind = fu.getFeatStackName(pathConf)

    pathToModelConfig = outputPath + "/config_model/configModel.cfg"
    configModel = open(pathToModelConfig, "w")
    configModel.write("AllModel:\n[\n")
    configModel.close()
    for seed in range(N):
        pathAppVal = fu.FileSearch_AND(pathShapes, True, "seed" + str(seed),
                                       ".shp", "learn")
        sort = [(path.split("/")[-1].split("_")[posModel], path)
                for path in pathAppVal]
        sort = fu.sortByFirstElem(sort)
        # get tiles by model
        names = []
        for r, paths in sort:
            names.append("_".join(path.split("/")[-1].split("_")[0]
                                  for path in paths))
        cpt = 0
        for r, paths in sort:
            writeConfigName(r, names[cpt], pathToModelConfig)
            cpt += 1
        if samplesMode == "points":
            pathAppVal = fu.FileSearch_AND(outputPath + "/learningSamples",
                                           True, "seed" + str(seed), ".sqlite",
                                           "learn")
            sort = [(path.split("/")[-1].split("_")[posModel], path)
                    for path in pathAppVal]

        for r, paths in sort:
            print r
            if samplesMode != "points":
                cmd = buildTrainCmd_poly(r, paths, pathToTiles, Stack_ind,
                                         classif, options, dataField, out,
                                         seed, stat, pathlog)
            else:
                if binding == "True" and classif == "svm":
                    outStats = outputPath + "/stats/Model_" + r + ".xml"
                    if os.path.exists(outStats):
                        os.remove(outStats)
                    writeStatsFromSample(paths, outStats)
                cmd = buildTrainCmd_points(r, paths, classif, options,
                                           dataField, out, seed, stat, pathlog)
            cmd_out.append(cmd)

    configModel = open(pathToModelConfig, "a")
    configModel.write("\n]\n")
    configModel.close()

    fu.writeCmds(pathToCmdTrain + "/train.txt", cmd_out)

    return cmd_out
Example #9
def genResults(pathRes, pathNom):
    mode = "mean"
    # generate the mean confusion matrix (averaged over all the .csv files in the folder)
    ComputeAllMatrix(mode, pathRes + "/TMP", pathRes + "/TMP/mean.csv")

    resfile = open(pathRes + "/RESULTS.txt", "w")
    resfile.write("#row = reference\n#col = production\n\n")
    resfile.write("*********** Matrice de confusion : %s ***********\n" %
                  (mode))
    ConfMatrix(pathRes + "/TMP/mean.csv", pathNom,
               resfile)  # write the confusion matrix
    listClass, PreClass, RcallClass, FSClass, Kappa, OA = getCoeff(
        pathRes + "/TMP", pathNom)  # retrieve all the values

    Table_num, Table_cl = getNomenclature(pathNom)
    AllClass = []
    for num, cl in zip(Table_num, Table_cl):
        for Cclass in listClass:
            if cl == Cclass:
                AllClass.append(float(num))

    AllClass = sorted(AllClass)
    csv = fu.confCoordinatesCSV([pathRes + "/TMP/mean.csv"])
    csv_f = fu.sortByFirstElem(csv)
    confMat = fu.gen_confusionMatrix(csv_f, AllClass)
    nbMaxConf = 3
    classRef = 0
    maxConf = []
    for row in confMat:
        indMaxConf = heapq.nlargest(nbMaxConf, range(len(row)),
                                    row.__getitem__)
        ClassConf = ", ".join([
            Table_cl[Table_num.index(AllClass[currentInd])]
            for currentInd in indMaxConf
        ])
        maxConf.append(
            (Table_cl[Table_num.index(AllClass[classRef])], ClassConf))
        classRef += 1

    # compute the confidence intervals
    PreMean = []
    PreI = []
    RecMean = []
    RecI = []
    FSMean = []
    FSI = []

    # the formatted strings (mean +- half-width)
    Pre_S = []
    Rec_S = []
    FS_S = []
    for i in range(len(listClass)):
        # Compute mean
        PreMean.append("{:.3f}".format(float(np.mean(PreClass[i]))))
        RecMean.append("{:.3f}".format(float(np.mean(RcallClass[i]))))
        FSMean.append("{:.3f}".format(float(np.mean(FSClass[i]))))

        binf, bSup = stats.t.interval(0.95,
                                      len(listClass) - 1,
                                      loc=np.mean(np.mean(PreClass[i])),
                                      scale=stats.sem(PreClass[i]))
        PreI.append("{:.4f}".format(float(np.mean(PreClass[i]) - binf)))
        binf, bSup = stats.t.interval(0.95,
                                      len(listClass) - 1,
                                      loc=np.mean(np.mean(RcallClass[i])),
                                      scale=stats.sem(RcallClass[i]))
        RecI.append("{:.4f}".format(float(np.mean(RcallClass[i]) - binf)))
        binf, bSup = stats.t.interval(0.95,
                                      len(listClass) - 1,
                                      loc=np.mean(np.mean(FSClass[i])),
                                      scale=stats.sem(FSClass[i]))
        FSI.append("{:.4f}".format(float(np.mean(FSClass[i]) - binf)))

        Pre_S.append(str(PreMean[i]) + " +- " + str(PreI[i]))
        Rec_S.append(str(RecMean[i]) + " +- " + str(RecI[i]))
        FS_S.append(str(FSMean[i]) + " +- " + str(FSI[i]))

    KMean = "{:.3f}".format(float(np.mean(Kappa)))
    OAMean = "{:.3f}".format(float(np.mean(OA)))
    binf, bSup = stats.t.interval(0.95,
                                  len(listClass) - 1,
                                  loc=np.mean(np.mean(Kappa)),
                                  scale=stats.sem(Kappa))
    KI = "{:.4f}".format(float(np.mean(Kappa) - binf))
    binf, bSup = stats.t.interval(0.95,
                                  len(listClass) - 1,
                                  loc=np.mean(np.mean(OA)),
                                  scale=stats.sem(OA))
    OAI = "{:.4f}".format(float(np.mean(OA) - binf))

    resfile.write("KAPPA : %s +- %s\n" % (KMean, KI))
    resfile.write("OA : %s +- %s\n" % (OAMean, OAI))

    sizeClass = 0
    sizePre = 0
    sizeRec = 0
    sizeFS = 0
    sizeConf = 0

    for cl in listClass:
        if len(cl) > sizeClass:
            sizeClass = len(cl)
    for pr in Pre_S:
        if len(pr) > sizePre:
            if len("Precision moyenne") > sizePre:
                sizePre = len("Precision moyenne")
            else:
                sizePre = len(pr)
    for rec in Rec_S:
        if len(rec) > sizeRec:
            if len("Rappel moyen") > sizeRec:
                sizeRec = len("Rappel moyen")
            else:
                sizeRec = len(rec)
    for fs in FS_S:
        if len(fs) > sizeFS:
            if len("F-score moyen") > sizeFS:
                sizeFS = len("F-score moyen")
            else:
                sizeFS = len(fs)
    for ref, confusion in maxConf:
        if len(confusion) > sizeConf:
            if len("Confusion max") > sizeConf:
                sizeConf = len("Confusion max")
            else:
                sizeConf = len(confusion)
    sep = "-" * (sizeClass + sizePre + sizeRec + sizeFS + sizeConf + 13)

    resfile.write("\n%s | %s | %s | %s | %s\n" %
                  (CreateCell("Classes", sizeClass),
                   CreateCell("Precision moyenne",
                              sizePre), CreateCell("Rappel moyen", sizeRec),
                   CreateCell("F-score moyen",
                              sizeFS), CreateCell("Confusion max", sizeConf)))
    resfile.write("%s\n" % (sep))
    for i in range(len(listClass)):
        resfile.write(
            "%s | %s | %s | %s | %s\n" %
            (CreateCell(listClass[i], sizeClass), CreateCell(
                Pre_S[i], sizePre), CreateCell(Rec_S[i], sizeRec),
             CreateCell(FS_S[i], sizeFS), CreateCell(maxConf[i][1], sizeConf)))

    resfile.close()
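
The "+-" values written above are half-widths: the distance between the mean and the lower bound returned by stats.t.interval. The same number can be obtained directly from the t quantile; a minimal sketch with made-up per-run kappa values (note that the code above uses len(listClass) - 1 as the degrees of freedom):

import numpy as np
from scipy import stats

kappaPerRun = [0.78, 0.81, 0.80, 0.79]  # illustrative values, one per seed
dof = len(kappaPerRun) - 1

mean = np.mean(kappaPerRun)
binf, bSup = stats.t.interval(0.95, dof, loc=mean, scale=stats.sem(kappaPerRun))
halfWidth = mean - binf
# equivalently: halfWidth = stats.t.ppf(0.975, dof) * stats.sem(kappaPerRun)
# reported as "%.3f +- %.4f" % (mean, halfWidth)
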
Example #10
def ClassificationShaping(pathClassif, pathEnvelope, pathImg, fieldEnv, N,
                          pathOut, pathWd, pathConf, colorpath):
    f = file(pathConf)
    cfg = Config(f)

    Stack_ind = fu.getFeatStackName(pathConf)

    if pathWd == None:
        TMP = pathOut + "/TMP"
        if not os.path.exists(pathOut + "/TMP"):
            os.mkdir(TMP)
    else:
        TMP = pathWd
        if not os.path.exists(pathOut + "/TMP"):
            os.mkdir(pathOut + "/TMP")
    classifMode = cfg.argClassification.classifMode
    pathTest = cfg.chain.outputPath
    proj = cfg.GlobChain.proj.split(":")[-1]
    AllTile = cfg.chain.listTile.split(" ")
    mode = cfg.chain.mode
    pixType = cfg.argClassification.pixType
    featuresPath = cfg.chain.featuresPath
    outputStatistics = cfg.chain.outputStatistics
    spatialResolution = cfg.chain.spatialResolution
    allTMPFolder = fu.fileSearchRegEx(pathTest + "/TMPFOLDER*")
    if allTMPFolder:
        for tmpFolder in allTMPFolder:
            shutil.rmtree(tmpFolder)

    genGlobalConfidence(AllTile, pathTest, N, mode, classifMode, pathWd,
                        pathConf)

    if mode == "outside" and classifMode == "fusion":
        old_classif = fu.fileSearchRegEx(
            pathTest + "/classif/Classif_*_model_*f*_seed_*.tif")
        for rm in old_classif:
            print rm
            os.remove(rm)
            # os.system("mv "+rm+" "+pathTest+"/final/TMP/")

    classification = []
    confidence = []
    cloud = []
    for seed in range(N):
        classification.append([])
        confidence.append([])
        cloud.append([])
        sort = []
        if classifMode == "separate" or mode == "outside":
            AllClassifSeed = fu.FileSearch_AND(pathClassif, True, ".tif",
                                               "Classif", "seed_" + str(seed))
            ind = 1
        elif classifMode == "fusion":
            AllClassifSeed = fu.FileSearch_AND(
                pathClassif, True, "_FUSION_NODATA_seed" + str(seed) + ".tif")
            ind = 0
        for tile in AllClassifSeed:
            sort.append((tile.split("/")[-1].split("_")[ind], tile))
        sort = fu.sortByFirstElem(sort)
        for tile, paths in sort:
            exp = ""
            allCl = ""
            allCl_rm = []
            for i in range(len(paths)):
                allCl = allCl + paths[i] + " "
                allCl_rm.append(paths[i])
                if i < len(paths) - 1:
                    exp = exp + "im" + str(i + 1) + "b1 + "
                else:
                    exp = exp + "im" + str(i + 1) + "b1"
            path_Cl_final = TMP + "/" + tile + "_seed_" + str(seed) + ".tif"
            classification[seed].append(path_Cl_final)
            cmd = 'otbcli_BandMath -il ' + allCl + '-out ' + path_Cl_final + ' ' + pixType + ' -exp "' + exp + '"'
            print cmd
            os.system(cmd)

            for currentTileClassif in allCl_rm:
                os.remove(currentTileClassif)

            tileConfidence = pathOut + "/TMP/" + tile + "_GlobalConfidence_seed_" + str(
                seed) + ".tif"
            confidence[seed].append(tileConfidence)

            cloudTile = fu.FileSearch_AND(featuresPath + "/" + tile, True,
                                          "nbView.tif")[0]
            ClassifTile = TMP + "/" + tile + "_seed_" + str(seed) + ".tif"
            cloudTilePriority = pathTest + "/final/TMP/" + tile + "_Cloud.tif"
            cloudTilePriority_tmp = TMP + "/" + tile + "_Cloud.tif"

            cloudTilePriority_StatsOK = pathTest + "/final/TMP/" + tile + "_Cloud_StatsOK.tif"
            cloudTilePriority_tmp_StatsOK = TMP + "/" + tile + "_Cloud_StatsOK.tif"
            cloud[seed].append(cloudTilePriority)
            if not os.path.exists(cloudTilePriority):
                cmd_cloud = 'otbcli_BandMath -il ' + cloudTile + ' ' + ClassifTile + ' -out ' + cloudTilePriority_tmp + ' int16 -exp "im2b1>0?im1b1:0"'
                print cmd_cloud
                os.system(cmd_cloud)
                if outputStatistics == "True":
                    cmd_cloud = 'otbcli_BandMath -il ' + cloudTile + ' ' + ClassifTile + ' -out ' + cloudTilePriority_tmp_StatsOK + ' int16 -exp "im2b1>0?im1b1:-1"'
                    print cmd_cloud
                    os.system(cmd_cloud)
                    if pathWd:
                        shutil.copy(cloudTilePriority_tmp_StatsOK,
                                    cloudTilePriority_StatsOK)
                        os.remove(cloudTilePriority_tmp_StatsOK)

                if pathWd:
                    shutil.copy(cloudTilePriority_tmp, cloudTilePriority)
                    os.remove(cloudTilePriority_tmp)

    if pathWd != None:
        os.system("cp -a " + TMP + "/* " + pathOut + "/TMP")
    for seed in range(N):
        assembleFolder = pathTest + "/final"
        if pathWd: assembleFolder = pathWd
        fu.assembleTile_Merge(
            classification[seed], spatialResolution,
            assembleFolder + "/Classif_Seed_" + str(seed) + ".tif")
        if pathWd:
            shutil.copy(pathWd + "/Classif_Seed_" + str(seed) + ".tif",
                        pathTest + "/final")
        fu.assembleTile_Merge(
            confidence[seed], spatialResolution,
            assembleFolder + "/Confidence_Seed_" + str(seed) + ".tif")
        if pathWd:
            shutil.copy(pathWd + "/Confidence_Seed_" + str(seed) + ".tif",
                        pathTest + "/final")
        color.CreateIndexedColorImage(
            pathTest + "/final/Classif_Seed_" + str(seed) + ".tif", colorpath,
            pixType)

    fu.assembleTile_Merge(cloud[0], spatialResolution,
                          assembleFolder + "/PixelsValidity.tif")
    if pathWd: shutil.copy(pathWd + "/PixelsValidity.tif", pathTest + "/final")