def GenerateShapeTile(tiles, pathTiles, pathOut, pathWd, pathConf):
    """
    Build, for every tile, the envelope shapeFile with priority applied.

    IN :
        - tiles : list of tile names (ex : ["D0003H0005", ...])
        - pathTiles : root folder containing one sub-folder per tile
        - pathOut : folder receiving the resulting shapeFiles
        - pathWd : working directory (None/"" -> temporary files under pathOut)
        - pathConf : path to the configuration file
    """
    # open() instead of the deprecated Python 2 file() builtin
    cfg = Config(open(pathConf))
    proj = int(cfg.GlobChain.proj.split(":")[-1])
    executionMode = cfg.chain.executionMode
    # tilesPath = [pathTiles+"/"+tile+"/Final/"+fu.getFeatStackName(pathConf) for tile in tiles]
    MaskCommunPath = ""
    if executionMode == "sequential":
        MaskCommunPath = "/tmp/"
    tilesPath = [pathTiles + "/" + tile + MaskCommunPath + "/MaskCommunSL.tif"
                 for tile in tiles]
    ObjListTile = [Tile(currentTile, name)
                   for currentTile, name in zip(tilesPath, tiles)]
    ObjListTile_sort = sorted(ObjListTile, key=priorityKey)
    # temporary folder : under the working directory when one is provided
    tmpFile = pathOut + "/TMP"
    if pathWd:
        tmpFile = pathWd + "/TMP"
    if not os.path.exists(tmpFile):
        os.mkdir(tmpFile)
    genTileEnvPrio(ObjListTile_sort, pathOut, tmpFile, proj)
    # copy every generated "*_PRIO.shp" next to its tile name in pathOut
    AllPRIO = fu.FileSearch_AND(tmpFile, True, "_PRIO.shp")
    for prioTile in AllPRIO:
        tileName = prioTile.split("/")[-1].split("_")[0]
        fu.cpShapeFile(prioTile.replace(".shp", ""), pathOut + "/" + tileName,
                       [".prj", ".shp", ".dbf", ".shx"])
    shutil.rmtree(tmpFile)
def getAll_regions(tileName, folder):
    """
    Return the region identifiers found in the learning shapeFiles of a tile.

    IN :
        - tileName : name of the tile (part of the shapeFile names)
        - folder : folder containing the "*learn*.shp" files
    OUT : list of region ids, order of first appearance, without duplicates
    """
    allRegion = []
    allShape = fu.FileSearch_AND(folder, True, "learn", tileName, ".shp")
    for currentShape in allShape:
        # the region id is the 3rd "_"-separated token of the file name
        currentRegion = currentShape.split("/")[-1].split("_")[2]
        if currentRegion not in allRegion:  # idiomatic "not in" (was "not x in")
            allRegion.append(currentRegion)
    return allRegion
def fusion(pathClassif, pathConf, pathWd):
    """
    Build every otbcli_FusionOfClassifications command needed to merge the
    per-model classifications, write them to <pathClassif>/../cmd/fusion/fusion.txt
    and return them.

    IN :
        - pathClassif : folder containing the "Classif_*.tif" rasters
        - pathConf : path to the configuration file
        - pathWd : working directory (None in the sequential case)
    OUT : the list of commands
    """
    # open() instead of the deprecated Python 2 file() builtin
    cfg = Config(open(pathConf))
    classifMode = cfg.argClassification.classifMode
    N = int(cfg.chain.runs)
    allTiles = cfg.chain.listTile.split(" ")
    fusionOptions = cfg.argClassification.fusionOptions
    mode = cfg.chain.mode
    pixType = cfg.argClassification.pixType
    if mode == "outside":
        # tiles and models are discovered from the classification rasters
        AllClassif = fu.fileSearchRegEx(pathClassif + "/Classif_*_model_*f*_seed_*.tif")
        allTiles = []
        models = []
        for classif in AllClassif:
            mod = classif.split("/")[-1].split("_")[3].split("f")[0]
            tile = classif.split("/")[-1].split("_")[1]
            if mod not in models:
                models.append(mod)
            if tile not in allTiles:
                allTiles.append(tile)
    AllCmd = []
    for seed in range(N):
        for tile in allTiles:
            directoryOut = pathClassif
            if pathWd is not None:
                # HPC : write into the node's temporary directory
                directoryOut = "$TMPDIR"
            if mode != "outside":
                classifPath = fu.FileSearch_AND(pathClassif, True,
                                                "Classif_" + tile,
                                                "seed_" + str(seed) + ".tif")
                allPathFusion = " ".join(classifPath)
                # NOTE(review): unlike the "outside" branch below, pixType is
                # not appended here -- confirm whether this is intentional.
                cmd = ("otbcli_FusionOfClassifications -il " + allPathFusion
                       + " " + fusionOptions + " -out " + directoryOut + "/"
                       + tile + "_FUSION_seed_" + str(seed) + ".tif")
                AllCmd.append(cmd)
            else:
                for mod in models:
                    classifPath = fu.fileSearchRegEx(
                        pathClassif + "/Classif_" + tile + "_model_" + mod
                        + "f*_seed_" + str(seed) + ".tif")
                    if len(classifPath) != 0:
                        allPathFusion = " ".join(classifPath)
                        cmd = ("otbcli_FusionOfClassifications -il "
                               + allPathFusion + " " + fusionOptions + " -out "
                               + directoryOut + "/" + tile + "_FUSION_model_"
                               + mod + "_seed_" + str(seed) + ".tif " + pixType)
                        AllCmd.append(cmd)
    # commands are stored in <pathClassif>/../cmd/fusion/fusion.txt
    tmp = pathClassif.split("/")
    if pathClassif[-1] == "/":
        del tmp[-1]
    tmp[-1] = "cmd/fusion"
    pathToCmdFusion = "/".join(tmp)
    fu.writeCmds(pathToCmdFusion + "/fusion.txt", AllCmd)
    return AllCmd
def generateRegionShape(mode, pathTiles, pathToModel, pathOut, fieldOut,
                        pathConf, pathWd):
    """
    create one shapeFile where all features belong to a model number according
    to the model description

    IN :
        - mode : "one_region" or "multi_regions"
                 if one_region is selected, the output shapeFile will contain
                 only one region constructed with all tiles in pathTiles
                 if multi_regions is selected, the output shapeFile will
                 contain per feature a model number according to the text file
                 pathToModel
        - pathTiles : path to the tile's envelope with priority consideration
                      ex : /xx/x/xxx/x
                      /!\ the folder which contain the envelopes must contain
                      only the envelopes <========
        - pathToModel : path to the text file which describe which tile belong
                        to which model; the text file must have the following
                        format :
                            R1 : D0003H0005,D0004H0005
                            R2 : D0005H0005,D0005H0004
                            R3 : D0003H0004,D0004H0004
                            R4 : D0003H0003,D0004H0003,D0005H0003
                        for 4 models and 9 tiles
        - pathOut : path to store the resulting shapeFile
        - fieldOut : the name of the field which will contain the model number
                     ex : "Mod"
        - pathWd : path to working directory (not mandatory, due to cluster's
                   architecture default = None)
    OUT :
        a shapeFile which contains for all feature the model number which it
        belong to
    """
    # open() instead of the deprecated Python 2 file() builtin
    cfg = Config(open(pathConf))
    proj = cfg.GlobChain.proj.split(":")[-1]
    region = []
    if mode == "one_region":
        AllTiles = fu.FileSearch_AND(pathTiles, False, ".shp")
        region.append(AllTiles)
    elif mode == "multi_regions":
        if not pathToModel:
            raise Exception(
                'if multi_regions is selected, you must specify a test file which describe the model'
            )
        with open(pathToModel, "r") as modelFile:
            for inLine in modelFile:
                region.append(
                    inLine.rstrip('\n\r').split(":")[-1].replace(
                        " ", "").split(","))
    # split pathOut into its folder part and its base name (without extension)
    p_f = pathOut.replace(" ", "").split("/")
    outName = p_f[-1].split(".")[0]
    pathMod = ""
    for i in range(1, len(p_f) - 1):
        pathMod = pathMod + "/" + p_f[i]
    CreateModelShapeFromTiles(region, pathTiles, int(proj), pathMod, outName,
                              fieldOut, pathWd)
def getPaths(TileFolder, pattern):
    """
    Return, for each sub-directory of TileFolder, the first file matching
    `pattern` (at most one path per tile directory).
    """
    found = []
    for entry in os.listdir(TileFolder):
        tileDir = TileFolder + "/" + entry
        if not os.path.isdir(tileDir):
            continue
        matches = fu.FileSearch_AND(tileDir, True, pattern)
        if matches:
            found.append(matches[0])
    return found
def getModel(pathShapes):
    """
    List, for every region, the tiles contributing a seed-0 learning shapeFile.

    IN :
        - pathShapes : folder containing the "*seed0*learn*.shp" files
    OUT : [(RegionNumber,[tile1,tile2,...]),(...),...]
    """
    couples = []
    pathAppVal = fu.FileSearch_AND(pathShapes, True, "seed0", ".shp", "learn")
    for path in pathAppVal:
        nameParts = path.split("/")[-1].split("_")
        # (region, tile) parsed from the file name
        couple = (nameParts[-3], nameParts[0])
        # BUG FIX: the previous membership test did
        # sort.index((int(region), tile)) against stored (str, str) tuples,
        # so it always raised ValueError and duplicates were appended.
        if couple not in couples:
            couples.append(couple)
    return fu.sortByFirstElem(
        couples)  # [(RegionNumber,[tile1,tile2,...]),(...),...]
def genNbView(TilePath, maskOut, nbview, workingDirectory=None):
    """
    Build a validity shapeFile keeping only the pixels observed at least
    `nbview` times over all sensor masks of a tile.

    IN :
        - TilePath : tile folder containing the "*_ST_MASK.tif" rasters
        - maskOut : path to the output shapeFile
        - nbview : minimum number of observations for a pixel to be kept
        - workingDirectory : optional working directory (HPC case)
    """
    nameNbView = "nbView.tif"
    wd = TilePath
    if workingDirectory:
        wd = workingDirectory
    tmp1 = wd + "/" + nameNbView
    if not os.path.exists(TilePath + "/" + nameNbView):
        # build the stack of every sensor mask
        MaskStack = "AllSensorMask.tif"
        maskList = fu.FileSearch_AND(TilePath, True, "_ST_MASK.tif")
        maskList = " ".join(maskList)
        # cmd = "gdalbuildvrt "+TilePath+"/"+MaskStack+" "+maskList
        cmd = "otbcli_ConcatenateImages -il " + maskList + " -out " + TilePath + "/" + MaskStack + " int16"
        print(cmd)
        os.system(cmd)
        # count the number of valid observations per pixel
        exp = buildExpression_cloud(TilePath + "/" + MaskStack)
        tmp2 = maskOut.replace(".shp", "_tmp_2.tif").replace(TilePath, wd)
        cmd = 'otbcli_BandMath -il ' + TilePath + "/" + MaskStack + ' -out ' + tmp1 + ' uint16 -exp "' + exp + '"'
        print(cmd)
        os.system(cmd)
        # threshold : 1 where the pixel was seen at least nbview times
        cmd = 'otbcli_BandMath -il ' + tmp1 + ' -out ' + tmp2 + ' -exp "im1b1>=' + str(nbview) + '?1:0"'
        print(cmd)
        os.system(cmd)
        # vectorize, then erode the resulting polygons
        maskOut_tmp = maskOut.replace(".shp", "_tmp.shp").replace(TilePath, wd)
        cmd = "gdal_polygonize.py -mask " + tmp2 + " " + tmp2 + " -f \"ESRI Shapefile\" " + maskOut_tmp
        print(cmd)
        os.system(cmd)
        fu.erodeShapeFile(maskOut_tmp, wd + "/" + maskOut.split("/")[-1], 0.1)
        os.remove(tmp2)
        fu.removeShape(maskOut_tmp.replace(".shp", ""),
                       [".prj", ".shp", ".dbf", ".shx"])
        if workingDirectory:
            # copy the results back from the working directory to the tile folder
            shutil.copy(tmp1, TilePath)
            fu.cpShapeFile(wd + "/" + maskOut.split("/")[-1].replace(".shp", ""),
                           TilePath, [".prj", ".shp", ".dbf", ".shx"], spe=True)
def confFusion(shapeIn, dataField, csv_out, txt_out, csvPath, pathConf):
    """
    For each seed, merge the per-tile confusion CSV files into one confusion
    matrix and write the resulting matrix and quality metrics (Kappa, OA,
    precision/recall/F-score per class).

    IN :
        - shapeIn : ground-truth shapeFile (used to list every possible class)
        - dataField : name of the class field in shapeIn
        - csv_out : folder receiving the merged confusion matrices
        - txt_out : folder receiving the metric reports
        - csvPath : folder containing the per-tile "*seed_<n>.csv" files
        - pathConf : path to the configuration file
    """
    # read the configuration once (was 5 separate deprecated file() opens)
    cfg = Config(open(pathConf))
    N = int(cfg.chain.runs)
    cropMix = cfg.argTrain.cropMix
    annualCrop = cfg.argTrain.annualCrop
    labelReplacement, labelName = cfg.argTrain.ACropLabelReplacement
    labelReplacement = int(labelReplacement)
    for seed in range(N):
        # list every possible class; recomputed each seed on purpose because
        # the cropMix branch below rebinds AllClass
        AllClass = []
        AllClass = fu.getFieldElement(shapeIn, "ESRI Shapefile", dataField,
                                      "unique")
        AllClass = sorted(AllClass)
        # initialise the final matrix from the per-tile CSVs
        AllConf = fu.FileSearch_AND(csvPath, True, "seed_" + str(seed) + ".csv")
        csv = fu.confCoordinatesCSV(AllConf)
        csv_f = fu.sortByFirstElem(csv)
        confMat = fu.gen_confusionMatrix(csv_f, AllClass)
        if cropMix == 'True':
            # keep a copy of the matrix before merging annual-crop classes
            writeCSV(confMat, AllClass,
                     csv_out + "/MatrixBeforeClassMerge_" + str(seed) + ".csv")
            confMat, AllClass = replaceAnnualCropInConfMat(
                confMat, AllClass, annualCrop, labelReplacement)
            writeCSV(confMat, AllClass,
                     csv_out + "/Classif_Seed_" + str(seed) + ".csv")
        else:
            writeCSV(confMat, AllClass,
                     csv_out + "/Classif_Seed_" + str(seed) + ".csv")
        nbrGood = confMat.trace()
        nbrSample = confMat.sum()
        overallAccuracy = float(nbrGood) / float(nbrSample)
        kappa = computeKappa(confMat)
        Pre = computePreByClass(confMat, AllClass)
        Rec = computeRecByClass(confMat, AllClass)
        Fs = computeFsByClass(Pre, Rec, AllClass)
        writeResults(
            Fs, Rec, Pre, kappa, overallAccuracy, AllClass,
            txt_out + "/ClassificationResults_seed_" + str(seed) + ".txt")
def GetFeatList(feature, opath):
    """
    Gets the list of features in a directory, used for NDVI, NDWI, Brightness

    ARGs:
        INPUT:
            -feature: the name of the feature
            -opath: root folder; images are searched in <opath>/<feature>
    OUT : sorted list of image file names (base names, no directory part)
    """
    IMG = fu.FileSearch_AND(opath + "/" + feature, True, feature, ".tif")
    IMG = sorted(IMG)
    print(opath + "/" + feature)
    print("les images :")
    print(IMG)
    # for image in glob.glob(opath+"/"+feature+"/"+feature+"*.tif"):
    # keep only the file name, not the full path
    return [os.path.basename(image) for image in IMG]
def createRegionsByTiles(shapeRegion, field_Region, pathToEnv, pathOut, pathWd):
    """
    create a shapeFile into tile's envelope for each regions in shapeRegion
    and for each tiles

    IN :
        - shapeRegion : the shape which contains all regions
        - field_Region : the field into the region's shape which describes
                         each tile belong to which model
        - pathToEnv : path to the tile's envelope with priority
        - pathOut : path to store all resulting shapeFile
        - pathWd : path to working directory (not mandatory, due to cluster's
                   architecture default = None)
    OUT : the list of all clipped shapeFiles
    """
    pathName = pathWd
    if pathWd is None:  # sequential case : work directly in the output folder
        pathName = pathOut
    # getAllTiles
    AllTiles = fu.FileSearch_AND(pathToEnv, True, ".shp")
    regionList = fu.getFieldElement(shapeRegion, "ESRI Shapefile", field_Region,
                                    "unique")
    shpRegionList = splitVectorLayer(shapeRegion, field_Region, "int",
                                     regionList, pathName)
    # clip every per-region shape with every tile envelope
    AllClip = []
    for shp in shpRegionList:
        for tile in AllTiles:
            pathToClip = fu.ClipVectorData(shp, tile, pathName)
            AllClip.append(pathToClip)
    if pathWd:
        # cluster case : copy the clips back to the output folder
        for clip in AllClip:
            cmd = "cp " + clip.replace(".shp", "*") + " " + pathOut
            print(cmd)
            os.system(cmd)
    else:
        # sequential case : remove the intermediate per-region shapes
        for shp in shpRegionList:
            path = shp.replace(".shp", "")
            for ext in (".shp", ".shx", ".dbf", ".prj"):
                os.remove(path + ext)
    return AllClip
def launchClassification(model, pathConf, stat, pathToRT, pathToImg, pathToRegion, fieldRegion, N, pathToCmdClassif, pathOut, pathWd):
    """
    Build one otbcli_ImageClassifier command per (trained model, seed, tile),
    rasterizing the region masks required by the classification along the way,
    and write every command to <pathToCmdClassif>/class.txt.

    IN :
        - model : folder containing the trained "model_*.txt" files
        - pathConf : path to the configuration file
        - stat : folder containing the statistics (used with svm classifiers)
        - pathToRT : folder containing the per-region/tile shapeFiles
        - pathToImg : root folder of the tile images
        - pathToRegion : region shapeFile (its base name prefixes the masks)
        - fieldRegion : region field used to rasterize the masks
        - N : number of runs (unused here -- kept for call compatibility)
        - pathToCmdClassif : folder receiving class.txt
        - pathOut : classification output folder
        - pathWd : working directory (None in the sequential case)
    OUT : the list of generated commands
    """
    f = file(pathConf)
    cfg = Config(f)
    classif = cfg.argTrain.classifier
    mode = cfg.chain.mode
    outputPath = cfg.chain.outputPath
    classifMode = cfg.argClassification.classifMode
    regionMode = cfg.chain.mode
    pixType = cfg.argClassification.pixType
    bindingPy = cfg.GlobChain.bindingPython
    Stack_ind = fu.getFeatStackName(pathConf)
    AllCmd = []
    allTiles_s = cfg.chain.listTile
    allTiles = allTiles_s.split(" ")
    maskFiles = pathOut + "/MASK"
    if not os.path.exists(maskFiles):
        os.system("mkdir " + maskFiles)
    shpRName = pathToRegion.split("/")[-1].replace(".shp", "")
    # NOTE(review): the comprehension below rebinds "f" (the config handle).
    AllModel = fu.FileSearch_AND(model, True, "model", ".txt")
    AllModel = [f for f in AllModel if os.path.splitext(f)[1] == '.txt']
    for path in AllModel:
        # NOTE(review): rebinds the "model" parameter with the model number
        # parsed from the current file name.
        model = path.split("/")[-1].split("_")[1]
        tiles = fu.getListTileFromModel(
            model, outputPath + "/config_model/configModel.cfg")
        model_Mask = model
        if re.search('model_.*f.*_', path.split("/")[-1]):
            # sub-model "<n>f<k>" : the mask is shared by the whole model <n>
            model_Mask = path.split("/")[-1].split("_")[1].split("f")[0]
        seed = path.split("/")[-1].split("_")[-1].replace(".txt", "")
        tilesToEvaluate = tiles
        if ("fusion" in classifMode and regionMode != "outside") or (regionMode == "one_region"):
            tilesToEvaluate = allTiles
        # build the output string
        for tile in tilesToEvaluate:
            pathToFeat = pathToImg + "/" + tile + "/Final/" + Stack_ind
            if bindingPy == "True":
                pathToFeat = fu.FileSearch_AND(
                    pathToImg + "/" + tile + "/tmp/", True, ".tif")[0]
            maskSHP = pathToRT + "/" + shpRName + "_region_" + model_Mask + "_" + tile + ".shp"
            maskTif = shpRName + "_region_" + model_Mask + "_" + tile + ".tif"
            CmdConfidenceMap = ""
            confidenceMap = ""
            if "fusion" in classifMode:
                if mode != "outside":
                    # fusion inside : the mask is the tile envelope
                    tmp = pathOut.split("/")
                    if pathOut[-1] == "/":
                        del tmp[-1]
                    tmp[-1] = "envelope"
                    pathToEnvelope = "/".join(tmp)
                    maskSHP = pathToEnvelope + "/" + tile + ".shp"
                confidenceMap = tile + "_model_" + model + "_confidence_seed_" + seed + ".tif"
                CmdConfidenceMap = " -confmap " + pathOut + "/" + confidenceMap
            if not os.path.exists(maskFiles + "/" + maskTif):
                pathToMaskCommun = pathToImg + "/" + tile + "/tmp/MaskCommunSL.shp"
                # cluster case
                if pathWd != None:
                    pathToMaskCommun = pathToImg + "/" + tile + "/MaskCommunSL.shp"
                    # NOTE(review): maskFiles is permanently rebound to pathWd
                    # here -- subsequent tiles will also write there; confirm.
                    maskFiles = pathWd
                nameOut = fu.ClipVectorData(maskSHP, pathToMaskCommun,
                                            maskFiles,
                                            maskTif.replace(".tif", ""))
                cmdRaster = "otbcli_Rasterization -in " + nameOut + " -mode attribute -mode.attribute.field " + fieldRegion + " -im " + pathToFeat + " -out " + maskFiles + "/" + maskTif
                if "fusion" in classifMode:
                    cmdRaster = "otbcli_Rasterization -in " + nameOut + " -mode binary -mode.binary.foreground 1 -im " + pathToFeat + " -out " + maskFiles + "/" + maskTif
                print cmdRaster
                os.system(cmdRaster)
                if pathWd != None:
                    os.system("cp " + pathWd + "/" + maskTif + " " + pathOut + "/MASK")
            out = pathOut + "/Classif_" + tile + "_model_" + model + "_seed_" + seed + ".tif"
            # hpc case
            if pathWd != None:
                out = "$TMPDIR/Classif_" + tile + "_model_" + model + "_seed_" + seed + ".tif"
                CmdConfidenceMap = " -confmap $TMPDIR/" + confidenceMap
            appli = "otbcli_ImageClassifier "
            pixType_cmd = pixType
            if bindingPy == "True":
                appli = "python bPy_ImageClassifier.py -conf " + pathConf + " "
                pixType_cmd = " -pixType " + pixType
            cmd = appli + " -in " + pathToFeat + " -model " + path + " -mask " + pathOut + "/MASK/" + maskTif + " -out " + out + " " + pixType_cmd + " -ram 128 " + CmdConfidenceMap
            # add the statistics during the classification step
            if ("svm" in classif):
                cmd = cmd + " -imstat " + stat + "/Model_" + str(
                    model) + ".xml"
            AllCmd.append(cmd)
    fu.writeCmds(pathToCmdClassif + "/class.txt", AllCmd)
    return AllCmd
def PreProcessS2(config, tileFolder, workingDirectory):
    """
    Pre-process a Sentinel-2 tile folder : resample the 20m bands to 10m,
    reproject the cloud/saturation/div masks, then build (or reproject) one
    band stack per acquisition date.

    IN :
        - config : path to the configuration file
        - tileFolder : tile folder (one sub-folder per date)
        - workingDirectory : directory used for intermediate outputs
    """
    # BUG FIX: was Config(args.config) -- a global "args" object and a raw
    # path instead of a file object built from the "config" parameter.
    # The configuration is now read once (was one file() open per option).
    cfg = Config(open(config))
    struct = cfg.Sentinel_2.arbo
    outputPath = cfg.chain.outputPath
    outRes = cfg.chain.spatialResolution
    projOut = cfg.GlobChain.proj
    projOut = projOut.split(":")[-1]
    arbomask = cfg.Sentinel_2.arbomask
    cloud = cfg.Sentinel_2.nuages
    sat = cfg.Sentinel_2.saturation
    div = cfg.Sentinel_2.div
    #cloud_reproj = Config(file(config)).Sentinel_2.nuages_reproj
    #sat_reproj = Config(file(config)).Sentinel_2.saturation_reproj
    #div_reproj = Config(file(config)).Sentinel_2.div_reproj
    needReproj = False
    # 20m bands which must be resampled to 10m
    B5 = fu.fileSearchRegEx(tileFolder + "/" + struct + "/../*FRE_B5*.tif")
    B6 = fu.fileSearchRegEx(tileFolder + "/" + struct + "/../*FRE_B6*.tif")
    B7 = fu.fileSearchRegEx(tileFolder + "/" + struct + "/../*FRE_B7*.tif")
    B8A = fu.fileSearchRegEx(tileFolder + "/" + struct + "/../*FRE_B8A*.tif")
    B11 = fu.fileSearchRegEx(tileFolder + "/" + struct + "/../*FRE_B11*.tif")
    B12 = fu.fileSearchRegEx(tileFolder + "/" + struct + "/../*FRE_B12*.tif")
    AllBands = B5 + B6 + B7 + B8A + B11 + B12  # AllBands to resample
    # Resample
    for band in AllBands:
        x, y = fu.getRasterResolution(band)
        folder = "/".join(band.split("/")[0:len(band.split("/")) - 1])
        pathOut = folder
        nameOut = band.split("/")[-1].replace(".tif", "_10M.tif")
        if workingDirectory:  # HPC
            pathOut = workingDirectory
        cmd = "otbcli_RigidTransformResample -in " + band + " -out " + pathOut + "/" + nameOut + " int16 -transform.type.id.scalex 2 -transform.type.id.scaley 2 -interpolator bco -interpolator.bco.radius 2"
        if str(x) != str(outRes):
            needReproj = True
        if str(x) != str(outRes) and not os.path.exists(folder + "/" + nameOut) and "10M_10M.tif" not in nameOut:
            print(cmd)
            os.system(cmd)
            if workingDirectory:  # HPC
                shutil.copy(pathOut + "/" + nameOut, folder + "/" + nameOut)
                os.remove(pathOut + "/" + nameOut)
    # Datas reprojection and build stack
    dates = os.listdir(tileFolder)
    for date in dates:
        # Masks reprojection
        AllCloud = fu.FileSearch_AND(tileFolder + "/" + date, True, cloud)
        AllSat = fu.FileSearch_AND(tileFolder + "/" + date, True, sat)
        AllDiv = fu.FileSearch_AND(tileFolder + "/" + date, True, div)
        for Ccloud, Csat, Cdiv in zip(AllCloud, AllSat, AllDiv):
            cloudProj = fu.getRasterProjectionEPSG(Ccloud)
            satProj = fu.getRasterProjectionEPSG(Csat)
            divProj = fu.getRasterProjectionEPSG(Cdiv)
            # Generate reproj even if no reprojection is needed (to improve!)
            outFolder = os.path.split(Ccloud)[0]
            cloudOut = os.path.split(Ccloud)[1].replace(".tif", "_reproj.tif")
            if int(cloudProj) != int(projOut):
                tmpInfo = outFolder + "/ImgInfo.txt"
                spx, spy = fu.getRasterResolution(Ccloud)
                cmd = 'gdalwarp -wo INIT_DEST=0 -tr ' + str(spx) + ' ' + str(spx) + ' -s_srs "EPSG:' + str(cloudProj) + '" -t_srs "EPSG:' + str(projOut) + '" ' + Ccloud + ' ' + workingDirectory + "/" + cloudOut
                if not os.path.exists(outFolder + "/" + cloudOut):
                    print(cmd)
                    os.system(cmd)
                    shutil.copy(workingDirectory + "/" + cloudOut, outFolder + "/" + cloudOut)
            else:
                shutil.copy(Ccloud, outFolder + "/" + cloudOut)
            outFolder = os.path.split(Csat)[0]
            satOut = os.path.split(Csat)[1].replace(".tif", "_reproj.tif")
            if int(satProj) != int(projOut):
                tmpInfo = outFolder + "/ImgInfo.txt"
                spx, spy = fu.getRasterResolution(Csat)
                # BUG FIX: the source SRS used to be cloudProj; use the
                # saturation mask's own projection.
                cmd = 'gdalwarp -wo INIT_DEST=0 -tr ' + str(spx) + ' ' + str(spx) + ' -s_srs "EPSG:' + str(satProj) + '" -t_srs "EPSG:' + str(projOut) + '" ' + Csat + ' ' + workingDirectory + "/" + satOut
                if not os.path.exists(outFolder + "/" + satOut):
                    print(cmd)
                    os.system(cmd)
                    shutil.copy(workingDirectory + "/" + satOut, outFolder + "/" + satOut)
            else:
                shutil.copy(Csat, outFolder + "/" + satOut)
            outFolder = os.path.split(Cdiv)[0]
            divOut = os.path.split(Cdiv)[1].replace(".tif", "_reproj.tif")
            if int(divProj) != int(projOut):
                tmpInfo = outFolder + "/ImgInfo.txt"
                #reverse = workingDirectory + "/" + divOut.replace(".tif", "_reverse.tif")
                spx, spy = fu.getRasterResolution(Cdiv)
                if not os.path.exists(outFolder + "/" + divOut):
                    # cmd = 'otbcli_BandMath -il '+Cdiv+' -out '+reverse+' -exp "im1b1==0?1:0"'
                    # print cmd
                    # os.system(cmd)
                    # BUG FIX: the source SRS used to be cloudProj; use the
                    # div mask's own projection.
                    cmd = 'gdalwarp -wo INIT_DEST=1 -tr ' + str(spx) + ' ' + str(spx) + ' -s_srs "EPSG:' + str(divProj) + '" -t_srs "EPSG:' + str(projOut) + '" ' + Cdiv + ' ' + workingDirectory + "/" + divOut
                    print(cmd)
                    os.system(cmd)
                    shutil.copy(workingDirectory + "/" + divOut, outFolder + "/" + divOut)
            else:
                shutil.copy(Cdiv, outFolder + "/" + divOut)
        ####################################
        # build the 10-band stack of the current date
        B2 = fu.fileSearchRegEx(tileFolder + "/" + date + "/*FRE_B2*.tif")[0]
        B3 = fu.fileSearchRegEx(tileFolder + "/" + date + "/*FRE_B3*.tif")[0]
        B4 = fu.fileSearchRegEx(tileFolder + "/" + date + "/*FRE_B4*.tif")[0]
        B5 = fu.fileSearchRegEx(tileFolder + "/" + date + "/*FRE_B5_*.tif")[0]
        B6 = fu.fileSearchRegEx(tileFolder + "/" + date + "/*FRE_B6_*.tif")[0]
        B7 = fu.fileSearchRegEx(tileFolder + "/" + date + "/*FRE_B7_*.tif")[0]
        B8 = fu.fileSearchRegEx(tileFolder + "/" + date + "/*FRE_B8*.tif")[0]
        B8A = fu.fileSearchRegEx(tileFolder + "/" + date + "/*FRE_B8A_*.tif")[0]
        B11 = fu.fileSearchRegEx(tileFolder + "/" + date + "/*FRE_B11_*.tif")[0]
        B12 = fu.fileSearchRegEx(tileFolder + "/" + date + "/*FRE_B12_*.tif")[0]
        if needReproj:
            # use the 10m resampled versions of the 20m bands
            B5 = fu.fileSearchRegEx(tileFolder + "/" + date + "/*FRE_B5*_10M.tif")[0]
            B6 = fu.fileSearchRegEx(tileFolder + "/" + date + "/*FRE_B6*_10M.tif")[0]
            B7 = fu.fileSearchRegEx(tileFolder + "/" + date + "/*FRE_B7*_10M.tif")[0]
            B8 = fu.fileSearchRegEx(tileFolder + "/" + date + "/*FRE_B8.tif")[0]
            B8A = fu.fileSearchRegEx(tileFolder + "/" + date + "/*FRE_B8A*_10M.tif")[0]
            B11 = fu.fileSearchRegEx(tileFolder + "/" + date + "/*FRE_B11*_10M.tif")[0]
            B12 = fu.fileSearchRegEx(tileFolder + "/" + date + "/*FRE_B12*_10M.tif")[0]
        listBands = B2 + " " + B3 + " " + B4 + " " + B5 + " " + B6 + " " + B7 + " " + B8 + " " + B8A + " " + B11 + " " + B12
        # listBands = B3+" "+B4+" "+B8
        print(listBands)
        currentProj = fu.getRasterProjectionEPSG(B3)
        stackName = "_".join(B3.split("/")[-1].split("_")[0:7]) + "_STACK.tif"
        stackNameProjIN = "_".join(B3.split("/")[-1].split("_")[0:7]) + "_STACK_EPSG" + str(currentProj) + ".tif"
        if os.path.exists(tileFolder + "/" + date + "/" + stackName):
            # the stack already exists : only reproject it if needed
            stackProj = fu.getRasterProjectionEPSG(tileFolder + "/" + date + "/" + stackName)
            if int(stackProj) != int(projOut):
                print("stack proj : " + str(stackProj) + " outproj : " + str(projOut))
                tmpInfo = tileFolder + "/" + date + "/ImgInfo.txt"
                spx, spy = fu.getGroundSpacing(tileFolder + "/" + date + "/" + stackName, tmpInfo)
                cmd = 'gdalwarp -tr ' + str(spx) + ' ' + str(spx) + ' -s_srs "EPSG:' + str(stackProj) + '" -t_srs "EPSG:' + str(projOut) + '" ' + tileFolder + "/" + date + "/" + stackName + ' ' + workingDirectory + "/" + stackName
                print(cmd)
                os.system(cmd)
                os.remove(tileFolder + "/" + date + "/" + stackName)
                shutil.copy(workingDirectory + "/" + stackName, tileFolder + "/" + date + "/" + stackName)
                os.remove(workingDirectory + "/" + stackName)
        else:
            # build the stack in the working directory, then reproject/copy it
            cmd = "otbcli_ConcatenateImages -il " + listBands + " -out " + workingDirectory + "/" + stackNameProjIN + " int16"
            print(cmd)
            os.system(cmd)
            currentProj = fu.getRasterProjectionEPSG(workingDirectory + "/" + stackNameProjIN)
            tmpInfo = workingDirectory + "/ImgInfo.txt"
            spx, spy = fu.getRasterResolution(workingDirectory + "/" + stackNameProjIN)
            if str(currentProj) == str(projOut):
                shutil.copy(workingDirectory + "/" + stackNameProjIN, tileFolder + "/" + date + "/" + stackName)
                os.remove(workingDirectory + "/" + stackNameProjIN)
            else:
                cmd = 'gdalwarp -tr ' + str(spx) + ' ' + str(spx) + ' -s_srs "EPSG:' + str(currentProj) + '" -t_srs "EPSG:' + str(projOut) + '" ' + workingDirectory + "/" + stackNameProjIN + ' ' + workingDirectory + "/" + stackName
                print(cmd)
                os.system(cmd)
                shutil.copy(workingDirectory + "/" + stackName, tileFolder + "/" + date + "/" + stackName)
# NOTE(review): fragment -- the "if" matching this "else:" (and the scope
# defining args/list_Sensor/userFeatPath/Stack/StackName/allUserFeatures)
# is not visible in this chunk.
else:
    # batch mode : compute the feature stack of every sensor in one OTB call
    for sensor in list_Sensor:
        red = str(sensor.bands["BANDS"]["red"])
        nir = str(sensor.bands["BANDS"]["NIR"])
        swir = str(sensor.bands["BANDS"]["SWIR"])
        comp = str(len(sensor.bands["BANDS"].keys()))
        serieTempGap = sensor.serieTempGap
        outputFeatures = args.opath + "/Features_" + sensor.name + ".tif"
        cmd = "otbcli_iota2FeatureExtraction -in " + serieTempGap + " -out " + outputFeatures + " int16 -comp " + comp + " -red " + red + " -nir " + nir + " -swir " + swir + " -copyinput true"
        print cmd
        deb = time.time()
        os.system(cmd)
        fin = time.time()
        # timing of the batch feature production
        print "Temps de production des primitives (BATCH) : " + str(fin - deb)
AllFeatures = fu.FileSearch_AND(args.opath, True, "Features", ".tif")
if len(AllFeatures) == 1:
    # single sensor : optionally append the user features, then move the stack
    if not os.path.exists(args.opath + "/Final/"):
        os.system("mkdir " + args.opath + "/Final/")
    if userFeatPath:
        cmdUFeat = "otbcli_ConcatenateImages -il " + AllFeatures[0] + " " + allUserFeatures + " -out " + \
            AllFeatures[0]
        print cmdUFeat
        os.system(cmdUFeat)
    shutil.copy(AllFeatures[0], Stack)
    os.remove(AllFeatures[0])
elif len(AllFeatures) > 1:
    # several sensors : concatenate every per-sensor feature stack
    AllFeatures_s = " ".join(AllFeatures)
    cmd = "otbcli_ConcatenateImages -il " + AllFeatures_s + " -out " + args.opath + "/Final/" + StackName
    print cmd
    os.system(cmd)
def generateRepartition(pathTest, config, rep_model, rep_model_repCore, dataField):
    """
    Analyse the class repartition of every model/tile and, for classes which
    are under-represented in a model, borrow learning polygons from the tile
    where the class is most present; then rewrite the model/tiles repartition
    file and duplicate the corresponding learning/validation shapeFiles and
    region masks.

    IN :
        - pathTest : chain output path (contains dataAppVal and shapeRegion)
        - config : path to the configuration file
        - rep_model : text file describing which tiles belong to which model
        - rep_model_repCore : output text file (corrected repartition)
        - dataField : name of the class field in the learning shapeFiles
    """
    shapeApp = pathTest + "/dataAppVal"
    regionTiles = pathTest + "/shapeRegion"
    seuilClass = 0.1  # threshold ?
    NbTuiles = 1  # number of tiles from which polygons may be borrowed
    resol = 30 * 30  # pixel area used when converting areas into counts
    f = file(config)
    cfg = Config(f)
    N = int(cfg.chain.runs)
    AllTiles = cfg.chain.listTile.split(" ")
    # collect the model identifiers from the region shapeFile names
    AllModel = []
    listModel = fu.FileSearch_AND(regionTiles, True, ".shp")
    for shape in listModel:
        model = shape.split("/")[-1].split("_")[-2]
        try:
            ind = AllModel.index(model)
        except ValueError:
            AllModel.append(model)
    # for each model and each seed, look at the class repartition
    repM = []  # [(model,seed,[[class0,area0],[class1,area1]...]),(...)]
    repT = []  # [(tile,seed,[[class0,area0],[class1,area1]...]),(...)]
    # NOTE(review): forces a single seed, overriding cfg.chain.runs
    N = 1  # <----------------------------------------------------------
    for seed in range(N):
        for model in AllModel:
            listShapeModel = fu.FileSearch_AND(
                shapeApp, True, "region_" + model,
                "seed" + str(seed) + "_learn.shp")
            modelRep = repartitionInShape(listShapeModel, dataField, resol)
            repM.append((model, seed, modelRep))
        for tile in AllTiles:
            listShapeModel = fu.FileSearch_AND(
                shapeApp, True, tile + "_region_",
                "seed" + str(seed) + "_learn.shp")
            tileRep = repartitionInShape(listShapeModel, dataField, resol)
            repT.append((tile, seed, tileRep))
    # compute all statistics by class for a given model
    listClassRep = []
    for m, seed, classRep in repM:
        for cl, rep in classRep:
            listClassRep.append([cl, rep])
    # mean concentration of each class over all models
    d = defaultdict(list)
    for k, v in listClassRep:
        d[k].append(v)
    rep_tmp = list(d.items())
    rep = []
    for cl, repModel in rep_tmp:
        rep.append((cl, np.asarray(repModel).mean()))
    repartition = sorted(
        rep, key=getSeconde)  # rep = [(class, mean presence),()...]
    # balanced presence of a class in a model = its mean presence in rep
    less = []
    for m, seed, classRep in repM:
        for cl, rep in classRep:
            for cl_m, rep_m in repartition:
                if cl == cl_m:
                    if rep < seuilClass * rep_m:
                        less.append((m, cl, rep))
    repCore = []
    corrections = []
    for m, cl, sample in less:
        for N in range(NbTuiles):
            maxS = 0
            cpti = 0
            for tile, seed, classRep in repT:
                cptj = 0
                for cl_t, rep_t in classRep:
                    if cl_t == cl and rep_t > maxS:
                        maxS = rep_t
                        tileMax = tile
                        # zero the chosen entry so the next pick finds
                        # another tile
                        repT[cpti][2][cptj][1] = 0
                    cptj += 1
                cpti += 1
            repCore.append([int(m), tileMax])
            corrections.append([int(m), tileMax])
    # print repCore
    # read the file describing the tiles repartition by model
    modelTile = []
    fileModel = open(rep_model, "r")
    regNumber = 0
    while 1:
        data = fileModel.readline().rstrip('\n\r')
        if data == "":
            break
        line = data.split(":")[-1]
        tiles = line.replace(" ", "").split(",")
        for tile_m in tiles:
            repCore.append([regNumber + 1, tile_m])
            modelTile.append([tile_m, regNumber + 1])
        regNumber += 1
    fileModel.close()
    # group the (model, tile) pairs by model
    d = defaultdict(list)
    for k, v in repCore:
        d[k].append(v)
    repCore = list(d.items())
    repCore = sorted(repCore, key=getFirst)
    # creation of the val/learn shapeFiles
    # NOTE(review): "tile" and "seed" below are leftover values from earlier
    # loops, not "tile_cor" -- looks suspicious, to confirm.
    for model_cor, tiles_cor in repCore:
        for tile_cor in tiles_cor:
            if not os.path.exists(shapeApp + "/" + tile + "_region_" +
                                  str(model_cor) + "_seed" + str(seed) +
                                  "_learn.shp"):
                learnShp = fu.FileSearch_AND(
                    shapeApp, True, tile, "seed" + str(seed) + "_learn.shp")
                cmd1 = "cp " + learnShp[
                    0] + " " + shapeApp + "/" + tile + "_region_" + str(
                        model_cor) + "_seed" + str(seed) + "_learn.shp"
                cmd2 = "cp " + learnShp[0].replace(
                    ".shp", ".shx"
                ) + " " + shapeApp + "/" + tile + "_region_" + str(
                    model_cor) + "_seed" + str(seed) + "_learn.shx"
                cmd3 = "cp " + learnShp[0].replace(
                    ".shp", ".dbf"
                ) + " " + shapeApp + "/" + tile + "_region_" + str(
                    model_cor) + "_seed" + str(seed) + "_learn.dbf"
                cmd4 = "cp " + learnShp[0].replace(
                    ".shp", ".prj"
                ) + " " + shapeApp + "/" + tile + "_region_" + str(
                    model_cor) + "_seed" + str(seed) + "_learn.prj"
                # NOTE(review): cmd5 copies the "_val.shx" file onto a ".shp"
                # destination -- probably meant "_val.shp"; to confirm.
                cmd5 = "cp " + learnShp[0].replace(
                    "_learn.shp", "_val.shx"
                ) + " " + shapeApp + "/" + tile + "_region_" + str(
                    model_cor) + "_seed" + str(seed) + "_val.shp"
                cmd6 = "cp " + learnShp[0].replace(
                    "_learn.shp", "_val.shx"
                ) + " " + shapeApp + "/" + tile + "_region_" + str(
                    model_cor) + "_seed" + str(seed) + "_val.shx"
                cmd7 = "cp " + learnShp[0].replace(
                    "_learn.shp", "_val.dbf"
                ) + " " + shapeApp + "/" + tile + "_region_" + str(
                    model_cor) + "_seed" + str(seed) + "_val.dbf"
                cmd8 = "cp " + learnShp[0].replace(
                    "_learn.shp", "_val.prj"
                ) + " " + shapeApp + "/" + tile + "_region_" + str(
                    model_cor) + "_seed" + str(seed) + "_val.prj"
                os.system(cmd1)
                os.system(cmd2)
                os.system(cmd3)
                os.system(cmd4)
                os.system(cmd5)
                os.system(cmd6)
                os.system(cmd7)
                os.system(cmd8)
    # write the new repartition file
    corFile = open(rep_model_repCore, "w")
    for i in range(len(repCore)):
        corFile.write("m" + str(i + 1) + " : ")
        for j in range(len(repCore[i][1])):
            if j < len(repCore[i][1]) - 1:
                corFile.write(repCore[i][1][j] + ",")
            else:
                corFile.write(repCore[i][1][j] + "\n")
    corFile.close()
    # copy of the region/tile shapeFiles (used as classification masks)
    for model_cor, tiles_cor in corrections:
        for tile_mt, mt in modelTile:
            if tile_mt == tiles_cor:
                maskShp = fu.FileSearch_AND(regionTiles, True,
                                            str(mt) + "_" + tile_mt, ".shp")
                fileName = maskShp[0].split("/")[-1].split(".")[0]
                fileName_out = fileName.replace(
                    "region_" + str(mt) + "_",
                    "region_" + str(model_cor) + "_")
                cmd1 = "cp " + regionTiles + "/" + fileName + ".shp " + regionTiles + "/" + fileName_out + ".shp "
                cmd2 = "cp " + regionTiles + "/" + fileName + ".shx " + regionTiles + "/" + fileName_out + ".shx "
                cmd3 = "cp " + regionTiles + "/" + fileName + ".dbf " + regionTiles + "/" + fileName_out + ".dbf "
                cmd4 = "cp " + regionTiles + "/" + fileName + ".prj " + regionTiles + "/" + fileName_out + ".prj "
                os.system(cmd1)
                os.system(cmd2)
                os.system(cmd3)
                os.system(cmd4)
def genJob(jobPath, testPath, logPath, pathConf):
    """
    Write the PBS job file <jobPath>/extractData.pbs which runs
    ExtractDataByRegion.py on every region shapeFile found in
    <testPath>/shapeRegion (PBS array job when more than one shape exists,
    plain job otherwise).

    IN :
        - jobPath : folder receiving the .pbs file
        - testPath : chain output path (contains shapeRegion)
        - logPath : folder receiving the PBS log files (single-job case)
        - pathConf : path to the configuration file
    """
    f = file(pathConf)
    cfg = Config(f)
    pathToJob = jobPath + "/extractData.pbs"
    if os.path.exists(pathToJob):
        os.remove(pathToJob)
    AllShape = fu.FileSearch_AND(testPath + "/shapeRegion", True, ".shp")
    nbShape = len(AllShape)
    if nbShape > 1:
        # array job : one task per region shapeFile
        jobFile = open(pathToJob, "w")
        jobFile.write('#!/bin/bash\n\
#PBS -N extractData\n\
#PBS -J 0-%d:1\n\
#PBS -l select=1:ncpus=3:mem=20000mb\n\
#PBS -l walltime=80:00:00\n\
\n\
module load python/2.7.5\n\
module remove xerces/2.7\n\
module load xerces/2.8\n\
module load gdal/1.11.0-py2.7\n\
\n\
FileConfig=%s\n\
export ITK_AUTOLOAD_PATH=""\n\
#export OTB_HOME=$(grep --only-matching --perl-regex "^((?!#).)*(?<=OTB_HOME\:).*" $FileConfig | cut -d "\'" -f 2)\n\
#. $OTB_HOME/config_otb.sh\n\
. /home/user13/theia_oso/vincenta/OTB_5_3/config_otb.sh\n\
export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=3\n\
\n\
PYPATH=$(grep --only-matching --perl-regex "^((?!#).)*(?<=pyAppPath\:).*" $FileConfig | cut -d "\'" -f 2)\n\
GROUNDTRUTH=$(grep --only-matching --perl-regex "^((?!#).)*(?<=groundTruth\:).*" $FileConfig | cut -d "\'" -f 2)\n\
TESTPATH=$(grep --only-matching --perl-regex "^((?!#).)*(?<=outputPath\:).*" $FileConfig | cut -d "\'" -f 2)\n\
TILEPATH=$(grep --only-matching --perl-regex "^((?!#).)*(?<=featuresPath\:).*" $FileConfig | cut -d "\'" -f 2)\n\
CONFIG=$FileConfig\n\
cd $PYPATH\n\
\n\
listData=($(find $TESTPATH/shapeRegion -maxdepth 1 -type f -name "*.shp"))\n\
path=${listData[${PBS_ARRAY_INDEX}]}\n\
python ExtractDataByRegion.py -conf $CONFIG -shape.region $path -shape.data $GROUNDTRUTH -out $TESTPATH/dataRegion --wd $TMPDIR -path.feat $TILEPATH' % (
            nbShape - 1, pathConf))
        jobFile.close()
    else:
        # single shapeFile : plain (non-array) job
        jobFile = open(pathToJob, "w")
        jobFile.write('#!/bin/bash\n\
#PBS -N extractData\n\
#PBS -l select=1:ncpus=3:mem=20000mb\n\
#PBS -l walltime=50:00:00\n\
#PBS -o %s/extractData_out.log\n\
#PBS -e %s/extractData_err.log\n\
\n\
module load python/2.7.5\n\
module remove xerces/2.7\n\
module load xerces/2.8\n\
module load gdal/1.11.0-py2.7\n\
\n\
FileConfig=%s\n\
export ITK_AUTOLOAD_PATH=""\n\
#export OTB_HOME=$(grep --only-matching --perl-regex "^((?!#).)*(?<=OTB_HOME\:).*" $FileConfig | cut -d "\'" -f 2)\n\
#. $OTB_HOME/config_otb.sh\n\
. /home/user13/theia_oso/vincenta/OTB_5_3/config_otb.sh\n\
\n\
PYPATH=$(grep --only-matching --perl-regex "^((?!#).)*(?<=pyAppPath\:).*" $FileConfig | cut -d "\'" -f 2)\n\
GROUNDTRUTH=$(grep --only-matching --perl-regex "^((?!#).)*(?<=groundTruth\:).*" $FileConfig | cut -d "\'" -f 2)\n\
TESTPATH=$(grep --only-matching --perl-regex "^((?!#).)*(?<=outputPath\:).*" $FileConfig | cut -d "\'" -f 2)\n\
TILEPATH=$(grep --only-matching --perl-regex "^((?!#).)*(?<=featuresPath\:).*" $FileConfig | cut -d "\'" -f 2)\n\
CONFIG=$FileConfig\n\
cd $PYPATH\n\
\n\
listData=($(find $TESTPATH/shapeRegion -maxdepth 1 -type f -name "*.shp"))\n\
path=${listData[0]}\n\
echo $GROUNDTRUTH\n\
python ExtractDataByRegion.py -conf $CONFIG -shape.region $path -shape.data $GROUNDTRUTH -out $TESTPATH/dataRegion -path.feat $TILEPATH --wd $TMPDIR' % (
            logPath, logPath, pathConf))
        jobFile.close()
# Generate a PBS job script "vectorSampler.pbs" that runs vectorSampler.py for each
# "*learn.shp" training shapefile found under testPath+"/dataAppVal". More than one
# shapefile yields a PBS job array indexed by PBS_ARRAY_INDEX; exactly one yields a
# single-task script logging to logPath.
# Parameters mirror the extractData genJob: jobPath (output dir for the .pbs file),
# testPath (chain output dir), logPath (PBS log dir), pathConf (chain configuration).
# NOTE(review): same name as the extractData genJob — presumably these live in
# different modules of the original project; confirm before merging into one file.
# NOTE(review): removal uses os.system("rm ...") here vs os.remove() in the sibling;
# `cfg` is built but apparently unused.
def genJob(jobPath, testPath, logPath, pathConf): f = file(pathConf) cfg = Config(f) pathToJob = jobPath + "/vectorSampler.pbs" if os.path.exists(pathToJob): os.system("rm " + pathToJob) AllTrainShape = fu.FileSearch_AND(testPath + "/dataAppVal", True, "learn.shp") nbShape = len(AllTrainShape) if nbShape > 1: jobFile = open(pathToJob, "w") jobFile.write('#!/bin/bash\n\ #PBS -N vectorSampler\n\ #PBS -J 0-%s:1\n\ #PBS -l select=1:ncpus=5:mem=10000mb\n\ #PBS -m be\n\ #PBS -l walltime=40:00:00\n\ \n\ \n\ module load python/2.7.5\n\ module remove xerces/2.7\n\ module load xerces/2.8\n\ module load gdal/1.11.0-py2.7\n\ \n\ FileConfig=%s\n\ export OTB_HOME=$(grep --only-matching --perl-regex "^((?!#).)*(?<=OTB_HOME\:).*" $FileConfig | cut -d "\'" -f 2)\n\ . $OTB_HOME/config_otb.sh\n\ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=5\n\ \n\ PYPATH=$(grep --only-matching --perl-regex "^((?!#).)*(?<=pyAppPath\:).*" $FileConfig | cut -d "\'" -f 2)\n\ TESTPATH=$(grep --only-matching --perl-regex "^((?!#).)*(?<=outputPath\:).*" $FileConfig | cut -d "\'" -f 2)\n\ cd $PYPATH\n\ \n\ listData=($(find $TESTPATH/dataAppVal -maxdepth 1 -type f -name "*learn.shp"))\n\ InShape=${listData[${PBS_ARRAY_INDEX}]}\n\ echo $InShape\n\ python vectorSampler.py -shape $InShape -conf $FileConfig --wd $TMPDIR' % (nbShape - 1, pathConf)) jobFile.close() else: jobFile = open(pathToJob, "w") jobFile.write('#!/bin/bash\n\ #PBS -N vectorSampler\n\ #PBS -l select=1:ncpus=5:mem=20000mb\n\ #PBS -m be\n\ #PBS -l walltime=03:00:00\n\ #PBS -o %s/vectorSampler_out.log\n\ #PBS -e %s/vectorSampler_err.log\n\ \n\ \n\ module load python/2.7.5\n\ module remove xerces/2.7\n\ module load xerces/2.8\n\ module load gdal/1.11.0-py2.7\n\ \n\ FileConfig=%s\n\ export ITK_AUTOLOAD_PATH=""\n\ . 
/data/qtis/inglada/modules/repository/otb_superbuild/otb_superbuild-5.7.0-Release-install/config_otb.sh\n\ \n\ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=5\n\ PYPATH=$(grep --only-matching --perl-regex "^((?!#).)*(?<=pyAppPath\:).*" $FileConfig | cut -d "\'" -f 2)\n\ TESTPATH=$(grep --only-matching --perl-regex "^((?!#).)*(?<=outputPath\:).*" $FileConfig | cut -d "\'" -f 2)\n\ cd $PYPATH\n\ \n\ listData=($(find $TESTPATH/dataAppVal -maxdepth 1 -type f -name "*learn.shp"))\n\ InShape=${listData[0]}\n\ python vectorSampler.py -shape $InShape -conf $FileConfig --wd $TMPDIR' % (logPath, logPath, pathConf)) jobFile.close()
def getCoeff(pathToResults, pathtoNom):
    """
    Parse OTB "ClassificationResults_*" text files and collect per-class metrics.

    the matrix in OTB's results must be square

    IN:
        pathToResults: directory searched recursively for result files
        pathtoNom: nomenclature file path, parsed by getNomenclature()
    OUT:
        listClass  : class names ordered as in the "#Reference labels (rows)" line
        PreClass, RcallClass, FSClass : one list per class, each holding the
                                        precision / recall / F-score from every file
        Kappa, OA  : one kappa / overall-accuracy value per result file
    """
    Pre = []
    Rec = []
    Fs = []
    Kappa = []
    OA = []
    Table_num, Table_cl = getNomenclature(pathtoNom)
    ResFile = fu.FileSearch_AND(pathToResults, True, "ClassificationResults_")

    # Read the class list from the first result file. Iterating over the file
    # (instead of `while 1: readline()`) guarantees termination at EOF even if
    # the "#Reference labels" marker is missing, and `with` closes the file on
    # any exception.
    listClass = []
    with open(ResFile[0], 'r') as ClassFile:
        for line in ClassFile:
            data = line.rstrip('\n\r')
            if data.count("#Reference labels (rows)") != 0:
                listClass_tmp = data.split(":")[-1].split(",")
                listClass_tmp = map(int, listClass_tmp)
                for numClass in listClass_tmp:
                    # map the numeric label to its nomenclature name
                    ind = Table_num.index(int(numClass))
                    listClass.append(Table_cl[ind])
                break

    # Collect metrics from every result file. "Overall accuracy index" is the
    # last metric of interest, so stop reading the file once it is found.
    for res in ResFile:
        with open(res, 'r') as resFile:
            for line in resFile:
                data = line.rstrip('\n\r')
                if data.count("Precision of the different classes:") != 0:
                    Pre.append(
                        data.split(":")[-1].replace("[", "").replace("]", "").replace(
                            " ", "").split(","))
                elif data.count("Recall of the different classes:") != 0:
                    Rec.append(
                        data.split(":")[-1].replace("[", "").replace("]", "").replace(
                            " ", "").split(","))
                elif data.count("F-score of the different classes:") != 0:
                    Fs.append(
                        data.split(":")[-1].replace("[", "").replace("]", "").replace(
                            " ", "").split(","))
                elif data.count("Kappa index") != 0:
                    Kappa.append(float(data.split(":")[-1]))
                elif data.count("Overall accuracy index") != 0:
                    OA.append(float(data.split(":")[-1]))
                    break

    # Transpose: build one list per class, each holding that class's metric
    # value from every result file.
    PreClass = []
    RcallClass = []
    FSClass = []
    for i in range(len(listClass)):
        PreClass.append([])
        RcallClass.append([])
        FSClass.append([])
    for i in range(len(Pre)):
        for j in range(len(PreClass)):
            PreClass[j].append(float(Pre[i][j]))
            RcallClass[j].append(float(Rec[i][j]))
            FSClass[j].append(float(Fs[i][j]))
    return listClass, PreClass, RcallClass, FSClass, Kappa, OA
# Generate point samples for annual-crop classes from a previous classification.
# Pipeline (all via otbcli calls): regularize the classification raster, keep only
# pixels whose validity raster exceeds validityThreshold, mask by the tile/region
# mask, then for each class in classToKeep pick up to rep[class] random pixel-center
# points (skipping coordinates already present in `coord`) and write them as a point
# layer to outlearningShape with gdalDriver. Temporary rasters are removed at the end.
# NOTE(review): the bare except around rep[int(currentVal)] swallows all errors, not
# just missing classes; `rep` presumably maps class value -> sample count (getNbSample).
# NOTE(review): the trailing bare `"""` at the end of this span appears to open/close
# a commented-out region in the original file — confirm against the full source.
def genAnnualShapePoints(coord, gdalDriver, workingDirectory, rasterResolution, classToKeep, dataField, tile, validityThreshold, validityRaster, classificationRaster, maskFolder, inlearningShape, outlearningShape): currentRegion = inlearningShape.split("/")[-1].split("_")[2] classifName = os.path.split(classificationRaster)[1] sizeX, sizeY = fu.getRasterResolution(classificationRaster) mapReg = workingDirectory + "/" + classifName.replace(".tif", "_MapReg_" + str(currentRegion) + ".tif") cmd = "otbcli_ClassificationMapRegularization -io.in " + classificationRaster + " -io.out " + mapReg + " -ip.undecidedlabel 0 " print cmd os.system(cmd) rasterVal = workingDirectory + "/" + classifName.replace(".tif", "_VAL_" + str(currentRegion) + ".tif") rasterRdy = workingDirectory + "/" + classifName.replace(".tif", "_RDY_" + str(currentRegion) + ".tif") projection = int(fu.getRasterProjectionEPSG(classificationRaster)) cmd = 'otbcli_BandMath -il ' + validityRaster + ' ' + mapReg + ' -out ' + rasterVal + ' uint8 -exp "im1b1>' + str( validityThreshold) + '?im2b1:0 "' print cmd os.system(cmd) Mask = fu.FileSearch_AND(maskFolder, True, tile, ".tif", "region_" + str(currentRegion.split("f")[0]))[0] cmd = 'otbcli_BandMath -il ' + rasterVal + ' ' + Mask + ' -out ' + rasterRdy + ' uint8 -exp "im1b1*im2b1"' # cmd = 'otbcli_BandMath -il '+mapReg+' '+Mask+' -out '+rasterRdy+' uint8 -exp "im1b1*im2b1"' print cmd os.system(cmd) # Resample ? 
""" if int(sizeX) != int(rasterResolution): resize = float(sizeX)/float(rasterResolution) resample = folder+"/"+classifName.replace(".tif","_Resample_"+str(currentRegion)+".tif") rasterRdy_svg = rasterRdy cmd = "otbcli_RigidTransformResample -in "+rasterRdy+" -out "+resample+" -transform.type.id.scalex "+resize+" -transform.type.id.scaley "+resize print cmd os.system(cmd) rasterRdy = resample """ rasterArray = raster2array(rasterRdy) rasterFile = gdal.Open(classificationRaster) x_origin, y_origin = rasterFile.GetGeoTransform()[0], rasterFile.GetGeoTransform()[3] sizeX, sizeY = rasterFile.GetGeoTransform()[1], rasterFile.GetGeoTransform()[5] rep = getNbSample(inlearningShape, tile, dataField, classToKeep, rasterResolution, currentRegion) driver = ogr.GetDriverByName(gdalDriver) if os.path.exists(outlearningShape): driver.DeleteDataSource(outlearningShape) data_source = driver.CreateDataSource(outlearningShape) srs = osr.SpatialReference() srs.ImportFromEPSG(projection) layerOUT = data_source.CreateLayer(dataField, srs, ogr.wkbPoint) field_name = ogr.FieldDefn(dataField, ogr.OFTInteger) # field_name.SetWidth(0) layerOUT.CreateField(field_name) for currentVal in classToKeep: try: nbSamples = rep[int(currentVal)] except: print "class : " + str(currentVal) + " doesn't exist in " + inlearningShape continue Y, X = np.where(rasterArray == int(currentVal)) XYcoordinates = [] for y, x in zip(Y, X): X_c, Y_c = pixCoordinates(x, y, x_origin, y_origin, sizeX, sizeY) XYcoordinates.append((X_c, Y_c)) if nbSamples > len(XYcoordinates): nbSamples = len(XYcoordinates) for Xc, Yc in random.sample(XYcoordinates, nbSamples): # "0" for nbSamples allready manage ? 
if coord and not (Xc, Yc) in coord: feature = ogr.Feature(layerOUT.GetLayerDefn()) feature.SetField(dataField, int(currentVal)) wkt = "POINT(%f %f)" % (Xc, Yc) point = ogr.CreateGeometryFromWkt(wkt) feature.SetGeometry(point) layerOUT.CreateFeature(feature) feature.Destroy() data_source.Destroy() os.remove(mapReg) os.remove(rasterVal) os.remove(rasterRdy) """
# Generate training samples by mixing two sources: non-annual classes are sampled
# from the ground-truth shapefile (PolygonClassStatistics + SampleSelection), while
# annual-crop classes are sampled from a previous year's classification via
# genAS.genAnnualShapePoints. The two selections are merged, then reflectances are
# extracted at the sample points either with otbcli_SampleExtraction (bindingPy ==
# "False") or through an in-memory OTB Python pipeline (gap-filling per sensor,
# ConcatenateImages, optional user features, SampleExtraction).
# NOTE(review): when neither annualCropFind nor nonAnnualCropFind is true,
# sampleSelection is never created — presumably callers guarantee at least one;
# confirm. allCoord is set to [0] when no non-annual samples exist (sentinel).
# NOTE(review): the final os.remove calls run only when pathWd is truthy and assume
# SampleSel_NA / stats_NA exist even if nonAnnualCropFind was false — potential
# crash path to confirm against the full source.
def generateSamples_classifMix(folderSample, workingDirectory, trainShape, pathWd, featuresPath, samplesOptions, annualCrop, AllClass, dataField, pathConf, configPrevClassif): currentTile, bindingPy = trainShape.split("/")[-1].split("_")[0], Config( file(pathConf)).GlobChain.bindingPython targetResolution, validityThreshold = Config( file(pathConf)).chain.spatialResolution, Config( file(pathConf)).argTrain.validityThreshold previousClassifPath, projOut = Config( file(configPrevClassif)).chain.outputPath, Config( file(configPrevClassif)).GlobChain.proj projOut = int(projOut.split(":")[-1]) stack = "/Final/" + fu.getFeatStackName(pathConf) userFeatPath = Config(file(pathConf)).chain.userFeatPath if userFeatPath == "None": userFeatPath = None featImg = featuresPath + "/" + currentTile + "/" + stack if bindingPy == "True": featImg = fu.FileSearch_AND(featuresPath + "/" + currentTile + "/tmp/", True, "ST_MASK")[0] nameNonAnnual = trainShape.split("/")[-1].replace(".shp", "_NonAnnu.shp") nonAnnualShape = workingDirectory + "/" + nameNonAnnual nameAnnual = trainShape.split("/")[-1].replace(".shp", "_Annu.shp") AnnualShape = workingDirectory + "/" + nameAnnual nonAnnualCropFind = filterShpByClass(dataField, nonAnnualShape, AllClass, trainShape) annualCropFind = filterShpByClass(dataField, AnnualShape, annualCrop, trainShape) gdalDriver = "SQLite" SampleSel_NA = workingDirectory + "/" + nameNonAnnual.replace( ".shp", "_SampleSel_NA.sqlite") stats_NA = workingDirectory + "/" + nameNonAnnual.replace( ".shp", "_STATS.xml") if nonAnnualCropFind: cmd = "otbcli_PolygonClassStatistics -in " + featImg + " -vec " + nonAnnualShape + " -field " + dataField + " -out " + stats_NA print cmd os.system(cmd) verifPolyStats(stats_NA) cmd = "otbcli_SampleSelection -in " + featImg + " -vec " + nonAnnualShape + " -field " + dataField + " -instats " + stats_NA + " -out " + SampleSel_NA + " " + samplesOptions print cmd os.system(cmd) allCoord = getPointsCoordInShape(SampleSel_NA, gdalDriver) 
else: allCoord = [0] nameAnnual = trainShape.split("/")[-1].replace(".shp", "_Annu.sqlite") annualShape = workingDirectory + "/" + nameAnnual validityRaster = fu.FileSearch_AND(previousClassifPath + "/final/TMP", True, currentTile, "Cloud.tif")[0] classificationRaster = fu.FileSearch_AND( previousClassifPath + "/final/TMP", True, currentTile + "_seed_0.tif")[0] maskFolder = previousClassifPath + "/classif/MASK" if annualCropFind: genAS.genAnnualShapePoints(allCoord, gdalDriver, workingDirectory, targetResolution, annualCrop, dataField, currentTile, validityThreshold, validityRaster, classificationRaster, maskFolder, trainShape, annualShape) MergeName = trainShape.split("/")[-1].replace(".shp", "_selectionMerge") sampleSelection = workingDirectory + "/" + MergeName + ".sqlite" if nonAnnualCropFind and annualCropFind: createSamplePoint(SampleSel_NA, annualShape, dataField, sampleSelection, projOut) elif nonAnnualCropFind and not annualCropFind: shutil.copy(SampleSel_NA, sampleSelection) elif not nonAnnualCropFind and annualCropFind: shutil.copy(annualShape, sampleSelection) samples = workingDirectory + "/" + trainShape.split("/")[-1].replace( ".shp", "_Samples.sqlite") if bindingPy == "False": folderSample + "/" + trainShape.split("/")[-1].replace( ".shp", "_Samples.sqlite") if not os.path.exists( folderSample + "/" + trainShape.split("/")[-1].replace(".shp", "_Samples.sqlite")): cmd = "otbcli_SampleExtraction -in " + featImg + " -vec " + sampleSelection + " -field " + dataField + " -out " + samples print cmd os.system(cmd) else: AllRefl = sorted( fu.FileSearch_AND(featuresPath + "/" + currentTile + "/tmp/", True, "REFL.tif")) AllMask = sorted( fu.FileSearch_AND(featuresPath + "/" + currentTile + "/tmp/", True, "MASK.tif")) datesInterp = sorted( fu.FileSearch_AND(featuresPath + "/" + currentTile + "/tmp/", True, "DatesInterp")) realDates = sorted( fu.FileSearch_AND(featuresPath + "/" + currentTile + "/tmp/", True, "imagesDate")) print AllRefl print AllMask print 
datesInterp print realDates # gapFill + feat features = [] concatSensors = otb.Registry.CreateApplication("ConcatenateImages") for refl, mask, datesInterp, realDates in zip(AllRefl, AllMask, datesInterp, realDates): gapFill = otb.Registry.CreateApplication( "ImageTimeSeriesGapFilling") nbDate = fu.getNbDateInTile(realDates) nbReflBands = fu.getRasterNbands(refl) comp = int(nbReflBands) / int(nbDate) print datesInterp if not isinstance(comp, int): raise Exception("unvalid component by date (not integer) : " + comp) gapFill.SetParameterString("in", refl) gapFill.SetParameterString("mask", mask) gapFill.SetParameterString("comp", str(comp)) gapFill.SetParameterString("it", "linear") gapFill.SetParameterString("id", realDates) gapFill.SetParameterString("od", datesInterp) gapFill.Execute() concatSensors.AddImageToParameterInputImageList( "il", gapFill.GetParameterOutputImage("out")) features.append(gapFill) # sensors Concatenation + sampleExtraction sampleExtr = otb.Registry.CreateApplication("SampleExtraction") sampleExtr.SetParameterString("ram", "128") sampleExtr.SetParameterString("vec", sampleSelection) sampleExtr.SetParameterString("field", dataField) sampleExtr.SetParameterString("out", samples) # if len(AllRefl) > 1: # concatSensors.Execute() # sampleExtr.SetParameterInputImage("in",concatSensors.GetParameterOutputImage("out")) # else: # sampleExtr.SetParameterInputImage("in",features[0].GetParameterOutputImage("out")) # sampleExtr.ExecuteAndWriteOutput() if len(AllRefl) > 1: concatSensors.Execute() allFeatures = concatSensors.GetParameterOutputImage("out") else: allFeatures = features[0].GetParameterOutputImage("out") if userFeatPath: print "Add user features" userFeat_arbo = Config(file(pathConf)).userFeat.arbo userFeat_pattern = (Config( file(pathConf)).userFeat.patterns).split(",") concatFeatures = otb.Registry.CreateApplication( "ConcatenateImages") userFeatures = fu.getUserFeatInTile(userFeatPath, currentTile, userFeat_arbo, userFeat_pattern) 
concatFeatures.SetParameterStringList("il", userFeatures) concatFeatures.Execute() concatAllFeatures = otb.Registry.CreateApplication( "ConcatenateImages") concatAllFeatures.AddImageToParameterInputImageList( "il", allFeatures) concatAllFeatures.AddImageToParameterInputImageList( "il", concatFeatures.GetParameterOutputImage("out")) concatAllFeatures.Execute() allFeatures = concatAllFeatures.GetParameterOutputImage("out") sampleExtr.SetParameterInputImage("in", allFeatures) sampleExtr.ExecuteAndWriteOutput() if pathWd: shutil.copy( samples, folderSample + "/" + trainShape.split("/")[-1].replace(".shp", "_Samples.sqlite")) os.remove(SampleSel_NA) os.remove(sampleSelection) os.remove(stats_NA)
# Generate training samples directly from the ground-truth shapefile (no previous
# classification involved): PolygonClassStatistics -> SampleSelection, then sample
# extraction either through the in-memory OTB Python pipeline (bindingPython ==
# "True": per-sensor ImageTimeSeriesGapFilling, ConcatenateImages, optional user
# features, SampleExtraction) or via a plain otbcli_SampleExtraction call.
# The result "<trainShape>_Samples.sqlite" is copied to folderSample when pathWd is
# set; the intermediate selection and stats files are removed at the end.
# NOTE(review): the `dataField` parameter is immediately overwritten from the config
# file — the parameter value is effectively ignored.
# NOTE(review): Sensors objects (S2/L8/L5) are constructed for their side effects in
# tmpFolder — presumably per-sensor config initialisation; confirm in Sensors module.
def generateSamples_simple(folderSample, workingDirectory, trainShape, pathWd, featuresPath, samplesOptions, pathConf, dataField): bindingPython = Config(file(pathConf)).GlobChain.bindingPython dataField = Config(file(pathConf)).chain.dataField outputPath = Config(file(pathConf)).chain.outputPath userFeatPath = Config(file(pathConf)).chain.userFeatPath if userFeatPath == "None": userFeatPath = None tmpFolder = outputPath + "/TMPFOLDER" if not os.path.exists(tmpFolder): os.mkdir(tmpFolder) # Sensors S2 = Sensors.Sentinel_2("", Opath(tmpFolder), pathConf, "") L8 = Sensors.Landsat8("", Opath(tmpFolder), pathConf, "") L5 = Sensors.Landsat5("", Opath(tmpFolder), pathConf, "") # shutil.rmtree(tmpFolder, ignore_errors=True) SensorsList = [S2, L8, L5] stats = workingDirectory + "/" + trainShape.split("/")[-1].replace( ".shp", "_stats.xml") tile = trainShape.split("/")[-1].split("_")[0] stack = fu.getFeatStackName(pathConf) feat = featuresPath + "/" + tile + "/Final/" + stack if bindingPython == "True": feat = fu.FileSearch_AND(featuresPath + "/" + tile + "/tmp/", True, "ST_MASK")[0] os.environ["ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS"] = "1" cmd = "otbcli_PolygonClassStatistics -in " + feat + " -vec " + trainShape + " -out " + stats + " -field " + dataField print cmd os.system(cmd) verifPolyStats(stats) sampleSelection = workingDirectory + "/" + trainShape.split( "/")[-1].replace(".shp", "_SampleSel.sqlite") cmd = "otbcli_SampleSelection -out " + sampleSelection + " " + samplesOptions + " -field " + dataField + " -in " + feat + " -vec " + trainShape + " -instats " + stats print cmd os.system(cmd) # if pathWd:shutil.copy(sampleSelection,folderSample) os.environ["ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS"] = "5" samples = workingDirectory + "/" + trainShape.split("/")[-1].replace( ".shp", "_Samples.sqlite") if bindingPython == "True": sampleExtr = otb.Registry.CreateApplication("SampleExtraction") sampleExtr.SetParameterString("vec", sampleSelection) 
sampleExtr.SetParameterString("field", dataField) sampleExtr.SetParameterString("out", samples) AllRefl = sorted( fu.FileSearch_AND(featuresPath + "/" + tile + "/tmp/", True, "REFL.tif")) AllMask = sorted( fu.FileSearch_AND(featuresPath + "/" + tile + "/tmp/", True, "MASK.tif")) datesInterp = sorted( fu.FileSearch_AND(featuresPath + "/" + tile + "/tmp/", True, "DatesInterp")) realDates = sorted( fu.FileSearch_AND(featuresPath + "/" + tile + "/tmp/", True, "imagesDate")) print AllRefl print AllMask print datesInterp print realDates # gapFill + feat features = [] concatSensors = otb.Registry.CreateApplication("ConcatenateImages") for refl, mask, datesInterp, realDates in zip(AllRefl, AllMask, datesInterp, realDates): gapFill = otb.Registry.CreateApplication( "ImageTimeSeriesGapFilling") nbDate = fu.getNbDateInTile(realDates) nbReflBands = fu.getRasterNbands(refl) comp = int(nbReflBands) / int(nbDate) print datesInterp if not isinstance(comp, int): raise Exception("unvalid component by date (not integer) : " + comp) gapFill.SetParameterString("in", refl) gapFill.SetParameterString("mask", mask) gapFill.SetParameterString("comp", str(comp)) gapFill.SetParameterString("it", "linear") gapFill.SetParameterString("id", realDates) gapFill.SetParameterString("od", datesInterp) gapFill.Execute() # gapFill.SetParameterString("out","/ptmp/vincenta/tmp/TestGapFill.tif") # gapFill.ExecuteAndWriteOutput() # pause = raw_input("Pause1") # featExtr = otb.Registry.CreateApplication("iota2FeatureExtraction") # featExtr.SetParameterInputImage("in",gapFill.GetParameterOutputImage("out")) # featExtr.SetParameterString("comp",str(comp)) # for currentSensor in SensorsList: # if currentSensor.name in refl: # red = str(currentSensor.bands["BANDS"]["red"]) # nir = str(currentSensor.bands["BANDS"]["NIR"]) # swir = str(currentSensor.bands["BANDS"]["SWIR"]) # featExtr.SetParameterString("red",red) # featExtr.SetParameterString("nir",nir) # featExtr.SetParameterString("swir",swir) # 
featExtr.SetParameterString("ram","256") # featExtr.Execute() # features.append(featExtr) concatSensors.AddImageToParameterInputImageList( "il", gapFill.GetParameterOutputImage("out")) features.append(gapFill) # sensors Concatenation + sampleExtraction sampleExtr = otb.Registry.CreateApplication("SampleExtraction") sampleExtr.SetParameterString("ram", "1024") sampleExtr.SetParameterString("vec", sampleSelection) sampleExtr.SetParameterString("field", dataField) sampleExtr.SetParameterString("out", samples) if len(AllRefl) > 1: concatSensors.Execute() allFeatures = concatSensors.GetParameterOutputImage("out") else: allFeatures = features[0].GetParameterOutputImage("out") if userFeatPath: print "Add user features" userFeat_arbo = Config(file(pathConf)).userFeat.arbo userFeat_pattern = (Config( file(pathConf)).userFeat.patterns).split(",") concatFeatures = otb.Registry.CreateApplication( "ConcatenateImages") userFeatures = fu.getUserFeatInTile(userFeatPath, tile, userFeat_arbo, userFeat_pattern) concatFeatures.SetParameterStringList("il", userFeatures) concatFeatures.Execute() concatAllFeatures = otb.Registry.CreateApplication( "ConcatenateImages") concatAllFeatures.AddImageToParameterInputImageList( "il", allFeatures) concatAllFeatures.AddImageToParameterInputImageList( "il", concatFeatures.GetParameterOutputImage("out")) concatAllFeatures.Execute() allFeatures = concatAllFeatures.GetParameterOutputImage("out") sampleExtr.SetParameterInputImage("in", allFeatures) sampleExtr.ExecuteAndWriteOutput() # cmd = "otbcli_SampleExtraction -field "+dataField+" -out "+samples+" -vec "+sampleSelection+" -in /ptmp/vincenta/tmp/TestGapFill.tif" # print cmd # pause = raw_input("Pause") # os.system(cmd) else: cmd = "otbcli_SampleExtraction -field " + dataField + " -out " + samples + " -vec " + sampleSelection + " -in " + feat print cmd os.system(cmd) if pathWd: shutil.copy( samples, folderSample + "/" + trainShape.split("/")[-1].replace(".shp", "_Samples.sqlite")) 
os.remove(sampleSelection) os.remove(stats)
# Script fragment (presumably part of a feature-generation `main`): for each sensor,
# run otbcli_iota2FeatureExtraction on its gap-filled time series (band indices for
# red/NIR/SWIR taken from sensor.bands) and time the call; then gather all produced
# "Features*.tif" under args.opath and either copy the single result to Stack
# (after optionally concatenating user features) or concatenate several into
# args.opath+"/Final/"+StackName.
# NOTE(review): this span ends with a dangling `if userFeatPath:` whose body lies
# outside this view — the fragment is incomplete here; `list_Sensor`, `args`,
# `Stack`, `StackName` and `allUserFeatures` are defined elsewhere.
for sensor in list_Sensor: red = str(sensor.bands["BANDS"]["red"]) nir = str(sensor.bands["BANDS"]["NIR"]) swir = str(sensor.bands["BANDS"]["SWIR"]) comp = str(len(sensor.bands["BANDS"].keys())) serieTempGap = sensor.serieTempGap outputFeatures = args.opath + "/Features_" + sensor.name + ".tif" cmd = "otbcli_iota2FeatureExtraction -in " + serieTempGap + " -out " + outputFeatures + " int16 -comp " + comp + " -red " + red + " -nir " + nir + " -swir " + swir print cmd deb = time.time() os.system(cmd) fin = time.time() print "Temps de production des primitives (BATCH) : " + str( fin - deb) AllFeatures = fu.FileSearch_AND(args.opath, True, "Features", ".tif") if len(AllFeatures) == 1: if not os.path.exists(args.wOut + "/Final/"): os.system("mkdir " + args.wOut + "/Final/") if userFeatPath: cmdUFeat = "otbcli_ConcatenateImages -il " + AllFeatures[0] + " " + allUserFeatures + " -out " + \ AllFeatures[0] print cmdUFeat os.system(cmdUFeat) shutil.copy(AllFeatures[0], Stack) elif len(AllFeatures) > 1: AllFeatures = " ".join(AllFeatures) cmd = "otbcli_ConcatenateImages -il " + AllFeatures + " -out " + args.opath + "/Final/" + StackName print cmd os.system(cmd) if userFeatPath:
# Classify one tile with an in-memory OTB pipeline: gap-fill each sensor's time
# series (ImageTimeSeriesGapFilling), concatenate sensors and optional user features,
# run ImageClassifier with the given model/stats into "*_TMP.tif" outputs, then
# apply the classification mask via otbcli_BandMath (expr "im2b1>=1?im1b1:0") to
# produce the final classification and confidence rasters. When pathWd is set the
# final rasters are copied back to outputPath+"/classif"; the _TMP files are removed.
# Parameters: Classifmask (validity mask raster), model/stats (trained classifier
# and optional statistics), outputClassif/confmap (target raster paths, rewritten
# to _TMP variants internally), pixType is accepted but apparently unused here.
# NOTE(review): tempFolderSerie is also apparently unused; S2/L8/L5 Sensors objects
# are built for side effects in tmpFolder — confirm in the Sensors module.
def launchClassification(tempFolderSerie, Classifmask, model, stats, outputClassif, confmap, pathWd, pathConf, pixType): outputClassif = outputClassif.replace(".tif", "_TMP.tif") confmap = confmap.replace(".tif", "_TMP.tif") os.environ["ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS"] = "5" featuresPath = Config(file(pathConf)).chain.featuresPath outputPath = Config(file(pathConf)).chain.outputPath tile = outputClassif.split("/")[-1].split("_")[1] userFeatPath = Config(file(pathConf)).chain.userFeatPath if userFeatPath == "None": userFeatPath = None AllRefl = sorted( fu.FileSearch_AND(featuresPath + "/" + tile + "/tmp/", True, "REFL.tif")) AllMask = sorted( fu.FileSearch_AND(featuresPath + "/" + tile + "/tmp/", True, "MASK.tif")) datesInterp = sorted( fu.FileSearch_AND(featuresPath + "/" + tile + "/tmp/", True, "DatesInterp")) realDates = sorted( fu.FileSearch_AND(featuresPath + "/" + tile + "/tmp/", True, "imagesDate")) tmpFolder = outputPath + "/TMPFOLDER_" + tile if not os.path.exists(tmpFolder): os.mkdir(tmpFolder) # Sensors S2 = Sensors.Sentinel_2("", Opath(tmpFolder), pathConf, "") L8 = Sensors.Landsat8("", Opath(tmpFolder), pathConf, "") L5 = Sensors.Landsat5("", Opath(tmpFolder), pathConf, "") # shutil.rmtree(tmpFolder, ignore_errors=True) SensorsList = [S2, L8, L5] # gapFill + feat features = [] concatSensors = otb.Registry.CreateApplication("ConcatenateImages") for refl, mask, datesInterp, realDates in zip(AllRefl, AllMask, datesInterp, realDates): gapFill = otb.Registry.CreateApplication("ImageTimeSeriesGapFilling") nbDate = fu.getNbDateInTile(realDates) nbReflBands = fu.getRasterNbands(refl) comp = int(nbReflBands) / int(nbDate) if not isinstance(comp, int): raise Exception("unvalid component by date (not integer) : " + comp) gapFill.SetParameterString("in", refl) gapFill.SetParameterString("mask", mask) gapFill.SetParameterString("comp", str(comp)) gapFill.SetParameterString("it", "linear") gapFill.SetParameterString("id", realDates) 
gapFill.SetParameterString("od", datesInterp) # gapFill.SetParameterString("ram","1024") gapFill.Execute() # featExtr = otb.Registry.CreateApplication("iota2FeatureExtraction") # featExtr.SetParameterInputImage("in",gapFill.GetParameterOutputImage("out")) # featExtr.SetParameterString("comp",str(comp)) # for currentSensor in SensorsList: # if currentSensor.name in refl: # red = str(currentSensor.bands["BANDS"]["red"]) # nir = str(currentSensor.bands["BANDS"]["NIR"]) # swir = str(currentSensor.bands["BANDS"]["SWIR"]) # featExtr.SetParameterString("red",red) # featExtr.SetParameterString("nir",nir) # featExtr.SetParameterString("swir",swir) # featExtr.Execute() # features.append(featExtr) concatSensors.AddImageToParameterInputImageList( "il", gapFill.GetParameterOutputImage("out")) features.append(gapFill) classifier = otb.Registry.CreateApplication("ImageClassifier") classifier.SetParameterString("mask", Classifmask) if stats: classifier.SetParameterString("imstat", stats) classifier.SetParameterString("out", outputClassif) classifier.SetParameterString("model", model) classifier.SetParameterString("confmap", confmap) classifier.SetParameterString("ram", "512") print "AllRefl" print AllRefl # if len(AllRefl) >1: # concatSensors.Execute() # classifier.SetParameterInputImage("in",concatSensors.GetParameterOutputImage("out")) # else: # classifier.SetParameterInputImage("in",features[0].GetParameterOutputImage("out")) # classifier.ExecuteAndWriteOutput() if len(AllRefl) > 1: concatSensors.Execute() allFeatures = concatSensors.GetParameterOutputImage("out") else: allFeatures = features[0].GetParameterOutputImage("out") if userFeatPath: print "Add user features" userFeat_arbo = Config(file(pathConf)).userFeat.arbo userFeat_pattern = (Config( file(pathConf)).userFeat.patterns).split(",") concatFeatures = otb.Registry.CreateApplication("ConcatenateImages") userFeatures = fu.getUserFeatInTile(userFeatPath, tile, userFeat_arbo, userFeat_pattern) 
concatFeatures.SetParameterStringList("il", userFeatures) concatFeatures.Execute() concatAllFeatures = otb.Registry.CreateApplication("ConcatenateImages") concatAllFeatures.AddImageToParameterInputImageList("il", allFeatures) concatAllFeatures.AddImageToParameterInputImageList( "il", concatFeatures.GetParameterOutputImage("out")) concatAllFeatures.Execute() allFeatures = concatAllFeatures.GetParameterOutputImage("out") classifier.SetParameterInputImage("in", allFeatures) classifier.ExecuteAndWriteOutput() expr = "im2b1>=1?im1b1:0" cmd = 'otbcli_BandMath -il ' + outputClassif + ' ' + Classifmask + ' -out ' + outputClassif.replace( "_TMP.tif", ".tif") + ' -exp "' + expr + '"' print cmd os.system(cmd) cmd = 'otbcli_BandMath -il ' + confmap + ' ' + Classifmask + ' -out ' + confmap.replace( "_TMP.tif", ".tif") + ' -exp "' + expr + '"' print cmd os.system(cmd) if pathWd: shutil.copy(outputClassif.replace("_TMP.tif", ".tif"), outputPath + "/classif") if pathWd: shutil.copy(confmap.replace("_TMP.tif", ".tif"), outputPath + "/classif") os.remove(outputClassif) os.remove(confmap)
# Build the otbcli_ComputeConfusionMatrix commands for every tile/seed and write
# them to pathToCmdConfusion+"/confusion.txt"; also merges per-tile validation and
# learning shapefiles (fu.mergeVectors), launches compareRef for reference/diff
# rasters, and assembles the per-seed diff rasters into pathTest+"/final/".
# Returns the list of confusion commands (AllCmd).
# Tile names are derived from the "*_val.shp" files found under pathValid, not from
# the config's listTile (that line is commented out).
# NOTE(review): the try/except ValueError around AllTiles.index() is a membership
# test (`ind` is unused) — equivalent to `if currentTile not in AllTiles`.
# NOTE(review): Python 2 `file()` builtin; when pathWd is set, commands write to the
# literal "$TMPDIR" string, expanded later by the shell that runs the command file.
def genConfMatrix(pathClassif, pathValid, N, dataField, pathToCmdConfusion, pathConf, pathWd): AllCmd = [] pathTMP = pathClassif + "/TMP" f = file(pathConf) cfg = Config(f) pathTest = cfg.chain.outputPath workingDirectory = pathClassif + "/TMP" if pathWd: workingDirectory = os.getenv('TMPDIR').replace(":", "") # AllTiles = cfg.chain.listTile.split(" ") AllTiles = [] validationFiles = fu.FileSearch_AND(pathValid, True, "_val.shp") for valid in validationFiles: currentTile = valid.split("/")[-1].split("_")[0] try: ind = AllTiles.index(currentTile) except ValueError: AllTiles.append(currentTile) for seed in range(N): # recherche de tout les shapeFiles par seed, par tuiles pour les fusionner for tile in AllTiles: valTile = fu.FileSearch_AND(pathValid, True, tile, "_seed" + str(seed) + "_val.shp") fu.mergeVectors("ShapeValidation_" + tile + "_seed_" + str(seed), pathTMP, valTile) learnTile = fu.FileSearch_AND(pathValid, True, tile, "_seed" + str(seed) + "_learn.shp") fu.mergeVectors("ShapeLearning_" + tile + "_seed_" + str(seed), pathTMP, learnTile) pathDirectory = pathTMP if pathWd != None: pathDirectory = "$TMPDIR" cmd = 'otbcli_ComputeConfusionMatrix -in ' + pathClassif + '/Classif_Seed_' + str( seed ) + '.tif -out ' + pathDirectory + '/' + tile + '_seed_' + str( seed ) + '.csv -ref.vector.field ' + dataField + ' -ref vector -ref.vector.in ' + pathTMP + '/ShapeValidation_' + tile + '_seed_' + str( seed) + '.shp' AllCmd.append(cmd) classif = pathTMP + "/" + tile + "_seed_" + str(seed) + ".tif" diff = pathTMP + "/" + tile + "_seed_" + str(seed) + "_CompRef.tif" footprint = pathTest + "/final/Classif_Seed_0.tif" compareRef( pathTMP + '/ShapeValidation_' + tile + '_seed_' + str(seed) + '.shp', pathTMP + '/ShapeLearning_' + tile + '_seed_' + str(seed) + '.shp', classif, diff, footprint, workingDirectory, pathConf) fu.writeCmds(pathToCmdConfusion + "/confusion.txt", AllCmd) spatialRes = cfg.chain.spatialResolution for seed in range(N): AllDiff = fu.FileSearch_AND(pathTMP, 
True, "_seed_" + str(seed) + "_CompRef.tif") diff_seed = pathTest + "/final/diff_seed_" + str(seed) + ".tif" if pathWd: diff_seed = workingDirectory + "/diff_seed_" + str(seed) + ".tif" fu.assembleTile_Merge(AllDiff, spatialRes, diff_seed) if pathWd: shutil.copy(workingDirectory + "/diff_seed_" + str(seed) + ".tif", pathTest + "/final/diff_seed_" + str(seed) + ".tif") return (AllCmd)
# Build the training commands for every model/seed and write them to
# pathToCmdTrain+"/train.txt"; also writes a model->tiles description into
# outputPath+"/config_model/configModel.cfg" via writeConfigName.
# Training shapes are grouped by model name (element at posModel = -3 of the
# "_"-split filename). Depending on argTrain.shapeMode, either polygon-based
# (buildTrainCmd_poly) or point-based (buildTrainCmd_points) commands are built;
# for point mode with Python bindings and an SVM classifier, per-model statistics
# are first (re)written with writeStatsFromSample. Returns the command list.
# NOTE(review): the original docstring "OUT : les commandes pour l'app" means
# "OUT: the commands for the application". Python 2 `file()` builtin; `cmd` is
# appended once per seed after the model loop — presumably relying on the loop
# variable, confirm intended indentation in the full source.
def launchTraining(pathShapes, pathConf, pathToTiles, dataField, stat, N, pathToCmdTrain, out, pathWd, pathlog): """ OUT : les commandes pour l'app """ cmd_out = [] f = file(pathConf) cfg = Config(f) classif = cfg.argTrain.classifier options = cfg.argTrain.options outputPath = cfg.chain.outputPath samplesMode = Config(file(pathConf)).argTrain.shapeMode dataField = Config(file(pathConf)).chain.dataField binding = Config(file(pathConf)).GlobChain.bindingPython posModel = -3 # model's position, if training shape is split by "_" Stack_ind = fu.getFeatStackName(pathConf) pathToModelConfig = outputPath + "/config_model/configModel.cfg" configModel = open(pathToModelConfig, "w") configModel.write("AllModel:\n[\n") configModel.close() for seed in range(N): pathAppVal = fu.FileSearch_AND(pathShapes, True, "seed" + str(seed), ".shp", "learn") sort = [(path.split("/")[-1].split("_")[posModel], path) for path in pathAppVal] sort = fu.sortByFirstElem(sort) # get tiles by model names = [] for r, paths in sort: tmp = "" for i in range(len(paths)): if i < len(paths) - 1: tmp = tmp + paths[i].split("/")[-1].split("_")[0] + "_" else: tmp = tmp + paths[i].split("/")[-1].split("_")[0] names.append(tmp) cpt = 0 for r, paths in sort: writeConfigName(r, names[cpt], pathToModelConfig) cpt += 1 if samplesMode == "points": pathAppVal = fu.FileSearch_AND(outputPath + "/learningSamples", True, "seed" + str(seed), ".sqlite", "learn") sort = [(path.split("/")[-1].split("_")[posModel], path) for path in pathAppVal] for r, paths in sort: print r if samplesMode != "points": cmd = buildTrainCmd_poly(r, paths, pathToTiles, Stack_ind, classif, options, dataField, out, seed, stat, pathlog) else: if binding == "True" and classif == "svm": outStats = outputPath + "/stats/Model_" + r + ".xml" if os.path.exists(outStats): os.remove(outStats) writeStatsFromSample(paths, outStats) cmd = buildTrainCmd_points(r, paths, classif, options, dataField, out, seed, stat, pathlog) cmd_out.append(cmd) configModel = 
open(pathToModelConfig, "a") configModel.write("\n]\n") configModel.close() fu.writeCmds(pathToCmdTrain + "/train.txt", cmd_out) return cmd_out
def ClassificationShaping(pathClassif, pathEnvelope, pathImg, fieldEnv, N, pathOut, pathWd, pathConf, colorpath):
    """
    Assemble per-tile classification rasters into final mosaics.

    For each seed: merges the per-tile (or per-model, in fusion mode)
    classification rasters with otbcli_BandMath, builds per-tile validity
    ("cloud") rasters, then mosaics classification / confidence / validity
    with fu.assembleTile_Merge and produces an indexed-color image.

    Side effects only (shells out to OTB, removes intermediate rasters);
    returns None. When pathWd is given, work happens there and results are
    copied back to pathOut / pathTest+"/final".
    """
    f = file(pathConf)
    cfg = Config(f)
    Stack_ind = fu.getFeatStackName(pathConf)

    # TMP is the scratch folder: pathOut/TMP when no working dir is given,
    # otherwise the working dir itself (pathOut/TMP is still created so the
    # final copy below has a destination).
    if pathWd == None:
        TMP = pathOut + "/TMP"
        if not os.path.exists(pathOut + "/TMP"):
            os.mkdir(TMP)
    else:
        TMP = pathWd
        if not os.path.exists(pathOut + "/TMP"):
            os.mkdir(pathOut + "/TMP")

    classifMode, pathTest, proj = cfg.argClassification.classifMode, cfg.chain.outputPath, \
        cfg.GlobChain.proj.split(":")[-1]
    AllTile, mode, pixType = cfg.chain.listTile.split(" "), cfg.chain.mode, cfg.argClassification.pixType
    featuresPath, outputStatistics, spatialResolution = cfg.chain.featuresPath, cfg.chain.outputStatistics, cfg.chain.spatialResolution

    # Clean up any leftover scratch folders from previous runs.
    allTMPFolder = fu.fileSearchRegEx(pathTest + "/TMPFOLDER*")
    if allTMPFolder:
        for tmpFolder in allTMPFolder:
            shutil.rmtree(tmpFolder)

    genGlobalConfidence(AllTile, pathTest, N, mode, classifMode, pathWd, pathConf)

    # In outside+fusion mode the raw per-model classifications are obsolete
    # (the fused NODATA rasters replace them) -- remove them.
    if mode == "outside" and classifMode == "fusion":
        old_classif = fu.fileSearchRegEx(pathTest + "/classif/Classif_*_model_*f*_seed_*.tif")
        for rm in old_classif:
            print rm
            os.remove(rm)
            # os.system("mv "+rm+" "+pathTest+"/final/TMP/")

    # classification/confidence/cloud: one list of per-tile rasters per seed.
    classification = []
    confidence = []
    cloud = []
    for seed in range(N):
        classification.append([])
        confidence.append([])
        cloud.append([])
        sort = []
        # 'ind' is the position of the tile name in the file name, which
        # differs between the two naming schemes.
        if classifMode == "separate" or mode == "outside":
            AllClassifSeed = fu.FileSearch_AND(pathClassif, True, ".tif", "Classif", "seed_" + str(seed))
            ind = 1
        elif classifMode == "fusion":
            AllClassifSeed = fu.FileSearch_AND(pathClassif, True, "_FUSION_NODATA_seed" + str(seed) + ".tif")
            ind = 0
        for tile in AllClassifSeed:
            sort.append((tile.split("/")[-1].split("_")[ind], tile))
        sort = fu.sortByFirstElem(sort)  # group raster paths by tile

        for tile, paths in sort:
            # Sum all rasters of this tile into one (the per-model rasters
            # are presumably mutually exclusive, so '+' merges them).
            exp = ""
            allCl = ""
            allCl_rm = []
            for i in range(len(paths)):
                allCl = allCl + paths[i] + " "
                allCl_rm.append(paths[i])
                if i < len(paths) - 1:
                    exp = exp + "im" + str(i + 1) + "b1 + "
                else:
                    exp = exp + "im" + str(i + 1) + "b1"
            path_Cl_final = TMP + "/" + tile + "_seed_" + str(seed) + ".tif"
            classification[seed].append(path_Cl_final)
            # NOTE: allCl ends with a space, so 'allCl + "-out"' is valid.
            cmd = 'otbcli_BandMath -il ' + allCl + '-out ' + path_Cl_final + ' ' + pixType + ' -exp "' + exp + '"'
            print cmd
            os.system(cmd)
            # Inputs are consumed: remove them once merged.
            for currentTileClassif in allCl_rm:
                os.remove(currentTileClassif)

            tileConfidence = pathOut + "/TMP/" + tile + "_GlobalConfidence_seed_" + str(seed) + ".tif"
            confidence[seed].append(tileConfidence)

            # Build the per-tile pixel-validity raster: nbView count where
            # the tile is classified, 0 (or -1 for the stats variant) elsewhere.
            cloudTile = fu.FileSearch_AND(featuresPath + "/" + tile, True, "nbView.tif")[0]
            ClassifTile = TMP + "/" + tile + "_seed_" + str(seed) + ".tif"
            cloudTilePriority = pathTest + "/final/TMP/" + tile + "_Cloud.tif"
            cloudTilePriority_tmp = TMP + "/" + tile + "_Cloud.tif"
            cloudTilePriority_StatsOK = pathTest + "/final/TMP/" + tile + "_Cloud_StatsOK.tif"
            cloudTilePriority_tmp_StatsOK = TMP + "/" + tile + "_Cloud_StatsOK.tif"
            cloud[seed].append(cloudTilePriority)
            if not os.path.exists(cloudTilePriority):
                cmd_cloud = 'otbcli_BandMath -il ' + cloudTile + ' ' + ClassifTile + ' -out ' + cloudTilePriority_tmp + ' int16 -exp "im2b1>0?im1b1:0"'
                print cmd_cloud
                os.system(cmd_cloud)
                if outputStatistics == "True":
                    cmd_cloud = 'otbcli_BandMath -il ' + cloudTile + ' ' + ClassifTile + ' -out ' + cloudTilePriority_tmp_StatsOK + ' int16 -exp "im2b1>0?im1b1:-1"'
                    print cmd_cloud
                    os.system(cmd_cloud)
                    if pathWd:
                        shutil.copy(cloudTilePriority_tmp_StatsOK, cloudTilePriority_StatsOK)
                        os.remove(cloudTilePriority_tmp_StatsOK)
                if pathWd:
                    shutil.copy(cloudTilePriority_tmp, cloudTilePriority)
                    os.remove(cloudTilePriority_tmp)

    # Copy scratch results back when a working directory was used.
    if pathWd != None:
        os.system("cp -a " + TMP + "/* " + pathOut + "/TMP")

    # Mosaic per-seed classification and confidence, then the validity map.
    # NOTE(review): 'assembleFolder' is reused after this loop; with N == 0
    # the final assembleTile_Merge call would raise NameError -- confirm
    # N >= 1 is guaranteed by the caller.
    for seed in range(N):
        assembleFolder = pathTest + "/final"
        if pathWd:
            assembleFolder = pathWd
        fu.assembleTile_Merge(classification[seed], spatialResolution, assembleFolder + "/Classif_Seed_" + str(seed) + ".tif")
        if pathWd:
            shutil.copy(pathWd + "/Classif_Seed_" + str(seed) + ".tif", pathTest + "/final")
        fu.assembleTile_Merge(confidence[seed], spatialResolution, assembleFolder + "/Confidence_Seed_" + str(seed) + ".tif")
        if pathWd:
            shutil.copy(pathWd + "/Confidence_Seed_" + str(seed) + ".tif", pathTest + "/final")
        color.CreateIndexedColorImage(pathTest + "/final/Classif_Seed_" + str(seed) + ".tif", colorpath, pixType)

    # Validity is seed-independent: only cloud[0] is mosaicked.
    fu.assembleTile_Merge(cloud[0], spatialResolution, assembleFolder + "/PixelsValidity.tif")
    if pathWd:
        shutil.copy(pathWd + "/PixelsValidity.tif", pathTest + "/final")
def generateSamples_cropMix(folderSample, workingDirectory, trainShape, pathWd, featuresPath, samplesOptions, prevFeatures, annualCrop, AllClass, dataField, pathConf): currentTile = trainShape.split("/")[-1].split("_")[0] bindingPy = Config(file(pathConf)).GlobChain.bindingPython samplesClassifMix = Config(file(pathConf)).argTrain.samplesClassifMix userFeatPath = Config(file(pathConf)).chain.userFeatPath if userFeatPath == "None": userFeatPath = None stack = "/Final/" + fu.getFeatStackName(pathConf) NA_img = featuresPath + "/" + currentTile + "/" + stack A_img = prevFeatures + "/" + currentTile + "/" + stack if bindingPy == "True": NA_img = fu.FileSearch_AND(featuresPath + "/" + currentTile + "/tmp/", True, "ST_MASK")[0] A_img = fu.FileSearch_AND(prevFeatures + "/" + currentTile + "/tmp/", True, "ST_MASK")[0] # Step 1 : filter trainShape in order to keep non-annual class nameNonAnnual = trainShape.split("/")[-1].replace(".shp", "_NonAnnu.shp") nonAnnualShape = workingDirectory + "/" + nameNonAnnual filterShpByClass(dataField, nonAnnualShape, AllClass, trainShape) # Step 2 : filter trainShape in order to keep annual class nameAnnual = trainShape.split("/")[-1].replace(".shp", "_Annu.shp") annualShape = workingDirectory + "/" + nameAnnual annualCropFind = filterShpByClass(dataField, annualShape, annualCrop, trainShape) # Step 3 : nonAnnual stats stats_NA = workingDirectory + "/" + nameNonAnnual.replace( ".shp", "_STATS.xml") cmd = "otbcli_PolygonClassStatistics -in " + NA_img + " -vec " + nonAnnualShape + " -field " + dataField + " -out " + stats_NA print cmd os.system(cmd) verifPolyStats(stats_NA) # Step 4 : Annual stats stats_A = workingDirectory + "/" + nameAnnual.replace(".shp", "_STATS.xml") cmd = "otbcli_PolygonClassStatistics -in " + A_img + " -vec " + annualShape + " -field " + dataField + " -out " + stats_A if annualCropFind: print cmd os.system(cmd) verifPolyStats(stats_A) # Step 5 : Sample Selection NonAnnual SampleSel_NA = workingDirectory + "/" + 
nameNonAnnual.replace( ".shp", "_SampleSel_NA.sqlite") cmd = "otbcli_SampleSelection -in " + NA_img + " -vec " + nonAnnualShape + " -field " + dataField + " -instats " + stats_NA + " -out " + SampleSel_NA + " " + samplesOptions print cmd os.system(cmd) # Step 6 : Sample Selection Annual SampleSel_A = workingDirectory + "/" + nameAnnual.replace( ".shp", "_SampleSel_A.sqlite") cmd = "otbcli_SampleSelection -in " + A_img + " -vec " + annualShape + " -field " + dataField + " -instats " + stats_A + " -out " + SampleSel_A + " " + samplesOptions if annualCropFind: print cmd os.system(cmd) SampleExtr_NA = workingDirectory + "/" + nameNonAnnual.replace( ".shp", "_SampleExtr_NA.sqlite") SampleExtr_A = workingDirectory + "/" + nameAnnual.replace( ".shp", "_SampleExtr_A.sqlite") if bindingPy == "False": # Step 7 : Sample extraction NonAnnual cmd = "otbcli_SampleExtraction -in " + NA_img + " -vec " + SampleSel_NA + " -field " + dataField + " -out " + SampleExtr_NA print cmd os.system(cmd) # Step 8 : Sample extraction Annual cmd = "otbcli_SampleExtraction -in " + A_img + " -vec " + SampleSel_A + " -field " + dataField + " -out " + SampleExtr_A if annualCropFind: print cmd os.system(cmd) else: # Step 7 : Sample extraction NonAnnual concatSensors = otb.Registry.CreateApplication("ConcatenateImages") AllRefl = sorted( fu.FileSearch_AND(featuresPath + "/" + currentTile + "/tmp/", True, "REFL.tif")) AllMask = sorted( fu.FileSearch_AND(featuresPath + "/" + currentTile + "/tmp/", True, "MASK.tif")) datesInterp = sorted( fu.FileSearch_AND(featuresPath + "/" + currentTile + "/tmp/", True, "DatesInterp")) realDates = sorted( fu.FileSearch_AND(featuresPath + "/" + currentTile + "/tmp/", True, "imagesDate")) features = [] for refl, mask, datesInterp, realDates in zip(AllRefl, AllMask, datesInterp, realDates): gapFill = otb.Registry.CreateApplication( "ImageTimeSeriesGapFilling") nbDate = fu.getNbDateInTile(realDates) nbReflBands = fu.getRasterNbands(refl) comp = int(nbReflBands) / 
int(nbDate) if not isinstance(comp, int): raise Exception("unvalid component by date (not integer) : " + comp) gapFill.SetParameterString("in", refl) gapFill.SetParameterString("mask", mask) gapFill.SetParameterString("comp", str(comp)) gapFill.SetParameterString("it", "linear") gapFill.SetParameterString("id", realDates) gapFill.SetParameterString("od", datesInterp) # gapFill.SetParameterString("ram","1024") gapFill.Execute() concatSensors.AddImageToParameterInputImageList( "il", gapFill.GetParameterOutputImage("out")) features.append(gapFill) sampleExtr = otb.Registry.CreateApplication("SampleExtraction") sampleExtr.SetParameterString("ram", "128") sampleExtr.SetParameterString("vec", SampleSel_NA) sampleExtr.SetParameterString("field", dataField) sampleExtr.SetParameterString("out", SampleExtr_NA) # if len(AllRefl) > 1: # concatSensors.Execute() # sampleExtr.SetParameterInputImage("in",concatSensors.GetParameterOutputImage("out")) # else: # sampleExtr.SetParameterInputImage("in",features[0].GetParameterOutputImage("out")) # sampleExtr.ExecuteAndWriteOutput() if len(AllRefl) > 1: concatSensors.Execute() allFeatures = concatSensors.GetParameterOutputImage("out") else: allFeatures = features[0].GetParameterOutputImage("out") if userFeatPath: print "Add user features" userFeat_arbo = Config(file(pathConf)).userFeat.arbo userFeat_pattern = (Config( file(pathConf)).userFeat.patterns).split(",") concatFeatures = otb.Registry.CreateApplication( "ConcatenateImages") userFeatures = fu.getUserFeatInTile(userFeatPath, currentTile, userFeat_arbo, userFeat_pattern) concatFeatures.SetParameterStringList("il", userFeatures) concatFeatures.Execute() concatAllFeatures = otb.Registry.CreateApplication( "ConcatenateImages") concatAllFeatures.AddImageToParameterInputImageList( "il", allFeatures) concatAllFeatures.AddImageToParameterInputImageList( "il", concatFeatures.GetParameterOutputImage("out")) concatAllFeatures.Execute() allFeatures = 
concatAllFeatures.GetParameterOutputImage("out") sampleExtr.SetParameterInputImage("in", allFeatures) sampleExtr.ExecuteAndWriteOutput() # Step 8 : Sample extraction Annual concatSensors = otb.Registry.CreateApplication("ConcatenateImages") AllRefl = sorted( fu.FileSearch_AND(prevFeatures + "/" + currentTile + "/tmp/", True, "REFL.tif")) AllMask = sorted( fu.FileSearch_AND(prevFeatures + "/" + currentTile + "/tmp/", True, "MASK.tif")) datesInterp = sorted( fu.FileSearch_AND(prevFeatures + "/" + currentTile + "/tmp/", True, "DatesInterp")) realDates = sorted( fu.FileSearch_AND(prevFeatures + "/" + currentTile + "/tmp/", True, "imagesDate")) features = [] for refl, mask, datesInterp, realDates in zip(AllRefl, AllMask, datesInterp, realDates): gapFill = otb.Registry.CreateApplication( "ImageTimeSeriesGapFilling") nbDate = fu.getNbDateInTile(realDates) nbReflBands = fu.getRasterNbands(refl) comp = int(nbReflBands) / int(nbDate) if not isinstance(comp, int): raise Exception("unvalid component by date (not integer) : " + comp) gapFill.SetParameterString("in", refl) gapFill.SetParameterString("mask", mask) gapFill.SetParameterString("comp", str(comp)) gapFill.SetParameterString("it", "linear") gapFill.SetParameterString("id", realDates) gapFill.SetParameterString("od", datesInterp) # gapFill.SetParameterString("ram","1024") gapFill.Execute() concatSensors.AddImageToParameterInputImageList( "il", gapFill.GetParameterOutputImage("out")) features.append(gapFill) sampleExtr = otb.Registry.CreateApplication("SampleExtraction") sampleExtr.SetParameterString("ram", "128") sampleExtr.SetParameterString("vec", SampleSel_A) sampleExtr.SetParameterString("field", dataField) sampleExtr.SetParameterString("out", SampleExtr_A) if len(AllRefl) > 1: concatSensors.Execute() allFeatures = concatSensors.GetParameterOutputImage("out") else: allFeatures = features[0].GetParameterOutputImage("out") if userFeatPath: print "Add user features" userFeat_arbo = Config(file(pathConf)).userFeat.arbo 
userFeat_pattern = (Config( file(pathConf)).userFeat.patterns).split(",") concatFeatures = otb.Registry.CreateApplication( "ConcatenateImages") userFeatures = fu.getUserFeatInTile(userFeatPath, currentTile, userFeat_arbo, userFeat_pattern) concatFeatures.SetParameterStringList("il", userFeatures) concatFeatures.Execute() concatAllFeatures = otb.Registry.CreateApplication( "ConcatenateImages") concatAllFeatures.AddImageToParameterInputImageList( "il", allFeatures) concatAllFeatures.AddImageToParameterInputImageList( "il", concatFeatures.GetParameterOutputImage("out")) concatAllFeatures.Execute() allFeatures = concatAllFeatures.GetParameterOutputImage("out") sampleExtr.SetParameterInputImage("in", allFeatures) if annualCropFind: sampleExtr.ExecuteAndWriteOutput() # Step 9 : Merge MergeName = trainShape.split("/")[-1].replace(".shp", "_Samples") listToMerge = [SampleExtr_NA] if annualCropFind: # listToMerge = [SampleExtr_A,SampleExtr_NA] listToMerge = [SampleExtr_NA, SampleExtr_A] fu.mergeSQLite(MergeName, workingDirectory, listToMerge) samples = workingDirectory + "/" + trainShape.split("/")[-1].replace( ".shp", "_Samples.sqlite") os.remove(stats_NA) os.remove(SampleSel_NA) os.remove(SampleExtr_NA) fu.removeShape(nonAnnualShape.replace(".shp", ""), [".prj", ".shp", ".dbf", ".shx"]) if annualCropFind: os.remove(stats_A) os.remove(SampleSel_A) os.remove(SampleExtr_A) fu.removeShape(annualShape.replace(".shp", ""), [".prj", ".shp", ".dbf", ".shx"]) if pathWd: shutil.copy( samples, folderSample + "/" + trainShape.split("/")[-1].replace(".shp", "_Samples.sqlite"))
def genJob(jobPath, testPath, logPath, pathConf):
    """
    Write the PBS job script jobPath/dataAppVal.pbs for RandomInSituByTile.

    When more than one region shapefile exists under testPath/dataRegion,
    an array job (#PBS -J) processes one shapefile per array index;
    otherwise a single job processes the only shapefile. Any existing
    job file is removed first. Returns None.

    The heredoc-style strings below are the job scripts themselves: they
    must be kept byte-for-byte (they re-extract chain parameters from the
    config file with grep at runtime).
    """
    f = file(pathConf)
    cfg = Config(f)
    pathToJob = jobPath + "/dataAppVal.pbs"
    if os.path.exists(pathToJob):
        os.system("rm " + pathToJob)
    AllShape = fu.FileSearch_AND(testPath + "/dataRegion", True, ".shp")
    nbShape = len(AllShape)

    if nbShape > 1:
        # Array job: indices 0..nbShape-1, one region shapefile each.
        jobFile = open(pathToJob, "w")
        jobFile.write('#!/bin/bash\n\
#PBS -N Data_AppVal\n\
#PBS -J 0-%d:1\n\
#PBS -l select=1:ncpus=2:mem=8000mb\n\
#PBS -m be\n\
#PBS -l walltime=10:00:00\n\
#PBS -o %s/Data_AppVal_out.log\n\
#PBS -e %s/Data_AppVal_err.log\n\
\n\
\n\
module load python/2.7.5\n\
module remove xerces/2.7\n\
module load xerces/2.8\n\
#module load gdal/1.11.0-py2.7\n\
\n\
FileConfig=%s\n\
export ITK_AUTOLOAD_PATH=""\n\
export OTB_HOME=$(grep --only-matching --perl-regex "^((?!#).)*(?<=OTB_HOME\:).*" $FileConfig | cut -d "\'" -f 2)\n\
. $OTB_HOME/config_otb.sh\n\
#. /home/user13/theia_oso/vincenta/OTB_5_3/config_otb.sh\n\
\n\
PYPATH=$(grep --only-matching --perl-regex "^((?!#).)*(?<=pyAppPath\:).*" $FileConfig | cut -d "\'" -f 2)\n\
DATAFIELD=$(grep --only-matching --perl-regex "^((?!#).)*(?<=dataField\:).*" $FileConfig | cut -d "\'" -f 2)\n\
Nsample=$(grep --only-matching --perl-regex "^((?!#).)*(?<=runs\:).*" $FileConfig | cut -d "\'" -f 2)\n\
TESTPATH=$(grep --only-matching --perl-regex "^((?!#).)*(?<=outputPath\:).*" $FileConfig | cut -d "\'" -f 2)\n\
RATIO=$(grep --only-matching --perl-regex "^((?!#).)*(?<=ratio\:).*" $FileConfig | cut -d "\'" -f 2)\n\
cd $PYPATH\n\
\n\
listData=($(find $TESTPATH/dataRegion -maxdepth 1 -type f -name "*.shp"))\n\
path=${listData[${PBS_ARRAY_INDEX}]}\n\
echo $FileConfig\n\
echo $RATIO\n\
echo $path\n\
echo $DATAFIELD\n\
echo $Nsample\n\
echo $TESTPATH/dataAppVal\n\
echo $TMPDIR\n\
echo $OTB_HOME/config_otb.sh\n\
python RandomInSituByTile.py -conf $FileConfig -ratio $RATIO -shape.dataTile $path -shape.field $DATAFIELD --sample $Nsample -out $TESTPATH/dataAppVal --wd $TMPDIR' % (nbShape - 1, logPath, logPath, pathConf))
        jobFile.close()
    else:
        # Single job: no #PBS -J directive, always takes listData[0].
        jobFile = open(pathToJob, "w")
        jobFile.write('#!/bin/bash\n\
#PBS -N Data_AppVal\n\
#PBS -l select=1:ncpus=2:mem=8000mb\n\
#PBS -m be\n\
#PBS -l walltime=10:00:00\n\
#PBS -o %s/Data_AppVal_out.log\n\
#PBS -e %s/Data_AppVal_err.log\n\
\n\
\n\
module load python/2.7.5\n\
module remove xerces/2.7\n\
module load xerces/2.8\n\
module load gdal/1.11.0-py2.7\n\
\n\
FileConfig=%s\n\
export ITK_AUTOLOAD_PATH=""\n\
export OTB_HOME=$(grep --only-matching --perl-regex "^((?!#).)*(?<=OTB_HOME\:).*" $FileConfig | cut -d "\'" -f 2)\n\
. $OTB_HOME/config_otb.sh\n\
#. /home/user13/theia_oso/vincenta/OTB_5_3/config_otb.sh\n\
\n\
PYPATH=$(grep --only-matching --perl-regex "^((?!#).)*(?<=pyAppPath\:).*" $FileConfig | cut -d "\'" -f 2)\n\
DATAFIELD=$(grep --only-matching --perl-regex "^((?!#).)*(?<=dataField\:).*" $FileConfig | cut -d "\'" -f 2)\n\
Nsample=$(grep --only-matching --perl-regex "^((?!#).)*(?<=runs\:).*" $FileConfig | cut -d "\'" -f 2)\n\
TESTPATH=$(grep --only-matching --perl-regex "^((?!#).)*(?<=outputPath\:).*" $FileConfig | cut -d "\'" -f 2)\n\
RATIO=$(grep --only-matching --perl-regex "^((?!#).)*(?<=ratio\:).*" $FileConfig | cut -d "\'" -f 2)\n\
cd $PYPATH\n\
\n\
listData=($(find $TESTPATH/dataRegion -maxdepth 1 -type f -name "*.shp"))\n\
path=${listData[0]}\n\
python RandomInSituByTile.py -conf $FileConfig -ratio $RATIO -shape.dataTile $path -shape.field $DATAFIELD --sample $Nsample -out $TESTPATH/dataAppVal --wd $TMPDIR' % (logPath, logPath, pathConf))
        jobFile.close()
def launchChainSequential(PathTEST, tiles, pathTilesL8, pathTilesL5, pathTilesS2, pathNewProcessingChain, pathTilesFeat, configFeature, shapeRegion, field_Region, model, shapeData, dataField, pathConf, N, REARRANGE_PATH, MODE, REARRANGE_FLAG, CLASSIFMODE, NOMENCLATURE, COLORTABLE, RATIO, TRAIN_MODE):
    """
    Run the full classification chain sequentially on the local machine.

    Pipeline: output-tree setup -> feature generation -> envelopes and
    region shapes -> learning/validation split -> (optional) sample
    generation -> statistics -> training -> classification -> shaping,
    confusion matrices and results. Side effects only (folders, os.system
    calls); returns None.
    """
    # Interactive guard against clobbering a previous run's output tree.
    if PathTEST != "/" and os.path.exists(PathTEST):
        choice = ""
        while (choice != "yes") and (choice != "no") and (choice != "y") and (choice != "n"):
            choice = raw_input("the path " + PathTEST + " already exist, do you want to remove it ? yes or no : ")
        if (choice == "yes") or (choice == "y"):
            shutil.rmtree(PathTEST)
        else:
            print "Unsafe mode. Overwriting existing output folder."
            #sys.exit(-1)

    fieldEnv = "FID"  # do not change

    # Build the output directory tree expected by the downstream steps.
    pathModels = PathTEST + "/model"
    pathEnvelope = PathTEST + "/envelope"
    pathClassif = PathTEST + "/classif"
    pathTileRegion = PathTEST + "/shapeRegion"
    classifFinal = PathTEST + "/final"
    dataRegion = PathTEST + "/dataRegion"
    pathAppVal = PathTEST + "/dataAppVal"
    pathStats = PathTEST + "/stats"
    cmdPath = PathTEST + "/cmd"
    config_model = PathTEST + "/config_model"
    if not os.path.exists(PathTEST):
        os.mkdir(PathTEST)
    if not os.path.exists(pathModels):
        os.mkdir(pathModels)
    if not os.path.exists(pathEnvelope):
        os.mkdir(pathEnvelope)
    if not os.path.exists(pathClassif):
        os.mkdir(pathClassif)
    if not os.path.exists(config_model):
        os.mkdir(config_model)
    if not os.path.exists(pathTileRegion):
        os.mkdir(pathTileRegion)
    if not os.path.exists(classifFinal):
        os.mkdir(classifFinal)
    if not os.path.exists(dataRegion):
        os.mkdir(dataRegion)
    if not os.path.exists(pathAppVal):
        os.mkdir(pathAppVal)
    if not os.path.exists(pathStats):
        os.mkdir(pathStats)
    if not os.path.exists(cmdPath):
        os.mkdir(cmdPath)
        os.mkdir(cmdPath + "/stats")
        os.mkdir(cmdPath + "/train")
        os.mkdir(cmdPath + "/cla")
        os.mkdir(cmdPath + "/confusion")
        os.mkdir(cmdPath + "/features")
        os.mkdir(cmdPath + "/fusion")
        os.mkdir(cmdPath + "/splitShape")

    # Generate and run the per-tile feature-extraction commands.
    feat = GFD.CmdFeatures(PathTEST, tiles, pathNewProcessingChain, pathTilesL8, pathTilesL5, pathTilesS2, pathConf, pathTilesFeat, None)
    for i in range(len(feat)):
        print feat[i]
        os.system(feat[i])

    # Envelope creation.
    env.GenerateShapeTile(tiles, pathTilesFeat, pathEnvelope, None, configFeature)

    if MODE != "outside":
        area.generateRegionShape(MODE, pathEnvelope, model, shapeRegion, field_Region, configFeature, None)

    # Region creation, per tile.
    RT.createRegionsByTiles(shapeRegion, field_Region, pathEnvelope, pathTileRegion, None)

    # For every file in pathTileRegion: extract reference data per region.
    regionTile = fu.FileSearch_AND(pathTileRegion, True, ".shp")
    # /////////////////////////////////////////////////////////////////////////////////////////
    for path in regionTile:
        ExtDR.ExtractData(path, shapeData, dataRegion, pathTilesFeat, configFeature, None)
    # /////////////////////////////////////////////////////////////////////////////////////////

    if REARRANGE_FLAG == 'True':
        RAM.generateRepartition(PathTEST, pathConf, shapeRegion, REARRANGE_PATH, dataField)

    # For every per-tile shapefile in dataRegion, build a learning set and
    # a validation set.
    dataTile = fu.FileSearch_AND(dataRegion, True, ".shp")
    # /////////////////////////////////////////////////////////////////////////////////////////
    for path in dataTile:
        RIST.RandomInSituByTile(path, dataField, N, pathAppVal, RATIO, pathConf, None)
    # /////////////////////////////////////////////////////////////////////////////////////////

    if MODE == "outside" and CLASSIFMODE == "fusion":
        Allcmd = genCmdSplitS.genCmdSplitShape(pathConf)
        for cmd in Allcmd:
            print cmd
            os.system(cmd)

    if TRAIN_MODE == "points":
        trainShape = fu.FileSearch_AND(PathTEST + "/dataAppVal", True, ".shp", "learn")
        for shape in trainShape:
            print ""
            vs.generateSamples(shape, None, configFeature)
        VSM.vectorSamplesMerge(configFeature)

    # Statistics-file generation: reuse an existing stats file when present.
    if not TRAIN_MODE == "points":
        AllCmd = MS.generateStatModel(pathAppVal, pathTilesFeat, pathStats, cmdPath + "/stats", None, configFeature)
        for cmd in AllCmd:
            print cmd
            print ""
            # The stats output path is presumably the last token of the
            # command line.
            stat = cmd.split(' ')[-1]
            print "Checking if " + stat + " exists..."
            if not os.path.exists(stat):
                os.system(cmd)
            else:
                print "Keeping existing " + stat + "."

    # /////////////////////////////////////////////////////////////////////////////////////////
    # Generate and run the training commands.
    allCmd = LT.launchTraining(pathAppVal, pathConf, pathTilesFeat, dataField, pathStats, N, cmdPath + "/train", pathModels, None, None)
    # /////////////////////////////////////////////////////////////////////////////////////////
    for cmd in allCmd:
        print cmd
        print ""
        os.system(cmd)
    # /////////////////////////////////////////////////////////////////////////////////////////

    # Generate and run the classification commands.
    cmdClassif = LC.launchClassification(pathModels, pathConf, pathStats, pathTileRegion, pathTilesFeat, shapeRegion, field_Region, N, cmdPath + "/cla", pathClassif, None)
    # /////////////////////////////////////////////////////////////////////////////////////////
    for cmd in cmdClassif:
        print cmd
        print ""
        os.system(cmd)
    # /////////////////////////////////////////////////////////////////////////////////////////

    if CLASSIFMODE == "separate":
        # Classification shaping.
        CS.ClassificationShaping(pathClassif, pathEnvelope, pathTilesFeat, fieldEnv, N, classifFinal, None, configFeature, COLORTABLE)
        # Confusion-matrix command generation.
        allCmd_conf = GCM.genConfMatrix(classifFinal, pathAppVal, N, dataField, cmdPath + "/confusion", configFeature, None)
        for cmd in allCmd_conf:
            print cmd
            os.system(cmd)
        confFus.confFusion(shapeData, dataField, classifFinal + "/TMP", classifFinal + "/TMP", classifFinal + "/TMP", configFeature)
        GR.genResults(classifFinal, NOMENCLATURE)

    elif CLASSIFMODE == "fusion" and MODE != "one_region":
        cmdFus = FUS.fusion(pathClassif, configFeature, None)
        for cmd in cmdFus:
            print cmd
            os.system(cmd)
        # No-data handling for the fused rasters.
        fusionFiles = fu.FileSearch_AND(pathClassif, True, "_FUSION_")
        for fusionpath in fusionFiles:
            ND.noData(PathTEST, fusionpath, field_Region, pathTilesFeat, shapeRegion, N, configFeature, None)
        # Classification shaping.
        CS.ClassificationShaping(pathClassif, pathEnvelope, pathTilesFeat, fieldEnv, N, classifFinal, None, configFeature, COLORTABLE)
        # Confusion-matrix command generation.
        allCmd_conf = GCM.genConfMatrix(classifFinal, pathAppVal, N, dataField, cmdPath + "/confusion", configFeature, None)
        # /////////////////////////////////////////////////////////////////////////////////////////
        for cmd in allCmd_conf:
            print cmd
            os.system(cmd)
        # /////////////////////////////////////////////////////////////////////////////////////////
        confFus.confFusion(shapeData, dataField, classifFinal + "/TMP", classifFinal + "/TMP", classifFinal + "/TMP", configFeature)
        GR.genResults(classifFinal, NOMENCLATURE)

    elif CLASSIFMODE == "fusion" and MODE == "one_region":
        raise Exception("You can't choose the 'one region' mode and use the fusion mode together")

    # Optional per-tile output statistics.
    outStat = Config(file(pathConf)).chain.outputStatistics
    if outStat == "True":
        AllTiles = Config(file(pathConf)).chain.listTile
        AllTiles = AllTiles.split(" ")
        for currentTile in AllTiles:
            OutS.outStats(pathConf, currentTile, N, None)
        MOutS.mergeOutStats(pathConf)
def ComputeAllMatrix(mode, pathToCSV, pathOUT):
    """
    Aggregate every 'Classif_Seed' confusion matrix found in a folder.

    IN
        mode [string] : "mean" or "sum" -- cell-wise aggregation
        pathToCSV [string] : folder containing the .csv confusion matrices
        pathOUT [string] : output file path -> be careful, pathToCSV and
                           pathOUT should differ: the algo uses every
                           matching .csv file in the folder, do not iterate
                           tests into the same folder
    OUT
        writes the aggregated matrix to pathOUT, preceded by the
        "#Reference labels" / "#Produced labels" header lines taken from
        the first matrix.

    Raises ValueError when no matrix is found or the header lines are
    missing (the previous implementation looped forever in that case).
    """
    # Remove leftover temporary csv files from previous runs.
    # os.remove replaces os.system("rm ..."): no shell, no quoting issues.
    for tmpCsv in fu.FileSearch_AND(pathToCSV, True, ".csv~"):
        os.remove(tmpCsv)
    for tmpCsv in fu.FileSearch_AND(pathToCSV, True, "_sq.csv"):
        os.remove(tmpCsv)

    # Square every matrix (VerifConfMatrix presumably emits a *_sq.csv
    # per input -- TODO confirm) and load them.
    csvPaths = [VerifConfMatrix(mat)
                for mat in fu.FileSearch_AND(pathToCSV, True, "Classif_Seed")]
    if not csvPaths:
        raise ValueError("no 'Classif_Seed' confusion matrix found in " + pathToCSV)
    # AllMatrix[numMatrix][y][x]
    AllMatrix = np.array([getCSVMatrix(path) for path in csvPaths])

    # Cell-wise sum over all matrices (replaces the hand-written
    # triple loop; the matrices are square and same-sized).
    MatrixSum = AllMatrix.sum(axis=0)
    nbMatrix = len(AllMatrix)

    # Recover the two label header lines from the first matrix.
    # FIX: the original 'while 1: readline()' scan never terminated when
    # "#Produced labels" was absent (readline() returns "" at EOF forever).
    head1 = None
    head2 = None
    FileIn = open(csvPaths[0], "r")
    try:
        for line in FileIn:
            data = line.rstrip('\n\r')
            if data.count("#Reference labels") != 0:
                head1 = data
            elif data.count("#Produced labels") != 0:
                head2 = data
                break
    finally:
        FileIn.close()
    if head1 is None or head2 is None:
        raise ValueError("label header lines not found in " + csvPaths[0])

    FileOut = open(pathOUT, "w")
    try:
        FileOut.write("%s\n" % (head1))
        FileOut.write("%s\n" % (head2))
        if mode == "sum":
            for row in MatrixSum:
                FileOut.write(",".join("%d" % cell for cell in row) + "\n")
        elif mode == "mean":
            for row in MatrixSum:
                FileOut.write(",".join("{:.2f}".format(float(cell) / float(nbMatrix)) for cell in row) + "\n")
    finally:
        FileOut.close()