def step_inputs(self):
    """
    Return
    ------
    the return can be an iterable or a callable
    """
    from simplification import MergeTileRasters as mtr
    from simplification import VectAndSimp as vas
    from Common import FileUtils as fut

    if os.path.exists(self.grid):
        return mtr.getListVectToClip(self.outmos, self.clipfield,
                                     self.outfilevect)
    else:
        params = []
        if not self.clipvalue:
            valsfield = vas.getFieldValues(self.clipfile, self.clipfield)
            for val in valsfield:
                params.append([fut.FileSearch_AND(self.outmos, True, ".shp",
                                                  "hermite")[0],
                               val])
        else:
            params = [fut.FileSearch_AND(self.outmos, True, ".shp", "hermite"),
                      self.clipvalue]
        print(params)
        return params
def check_errors_JA(log_dir, task_name):
    """
    collect errors found in job-array log files
    """
    from Common import FileUtils as fut

    if os.path.isdir(log_dir):
        all_logs = fut.FileSearch_AND(log_dir, True, ".ER")
    else:
        all_logs = fut.FileSearch_AND(os.path.split(log_dir)[0], True,
                                      task_name, ".log")
    errors = []
    for log in all_logs:
        error = check_errors(log)
        if error:
            errors.append(error)
    return errors
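# `check_errors` is called above but not defined in this excerpt; the sketch
# below is a hypothetical stand-in (name and keywords are assumptions, for
# illustration only): scan a log file and return the first suspicious line,
# or None when the log looks clean.
def check_errors_sketch(log_path, error_keywords=("Traceback", "ERROR")):
    """hypothetical stand-in for check_errors, assuming keyword matching"""
    with open(log_path, "r") as log_file:
        for line in log_file:
            # report the first line containing any error keyword
            if any(keyword in line for keyword in error_keywords):
                return line.strip()
    return None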
def GetDataAugmentationSyntheticParameters(IOTA2_dir):
    """
    parse the */learningSamples* directory and return the list of all
    sqlite sample files

    Parameters
    ----------
    IOTA2_dir : string
        absolute path to the IOTA2's directory

    Example
    -------
    >>> os.listdir("/IOTA2/learningSamples")
    ["Samples_region_2_seed0_learn.sqlite",
     "Samples_region_2_seed1_learn.sqlite",
     "Samples_region_1_seed0_learn.sqlite",
     "Samples_region_1_seed1_learn.sqlite"]
    >>> GetDataAugmentationSyntheticParameters("/IOTA2")
    [Samples_region_1_seed0_learn.sqlite, Samples_region_2_seed0_learn.sqlite,
     Samples_region_1_seed1_learn.sqlite, Samples_region_2_seed1_learn.sqlite]

    Return
    ------
    list
        a list of sqlite files containing samples
    """
    IOTA2_dir_learningSamples = os.path.join(IOTA2_dir, "learningSamples")
    return fut.FileSearch_AND(IOTA2_dir_learningSamples, True, ".sqlite")
def compute_fusion_options(iota2_dir_final, final_classifications, method,
                           undecidedlabel, dempstershafer_mob, pixType,
                           fusion_path):
    """
    used to determine fusion parameters
    """
    if method == "majorityvoting":
        options = {"il": final_classifications,
                   "method": method,
                   "nodatalabel": "0",
                   "undecidedlabel": str(undecidedlabel),
                   "pixType": pixType,
                   "out": fusion_path}
    else:
        confusionSeed = [fut.FileSearch_AND(os.path.join(iota2_dir_final, "TMP"),
                                            True,
                                            "Classif_Seed_{}.csv".format(run))[0]
                         for run in range(len(final_classifications))]
        confusionSeed.sort()
        final_classifications.sort()
        options = {"il": final_classifications,
                   "method": "dempstershafer",
                   "nodatalabel": "0",
                   "undecidedlabel": str(undecidedlabel),
                   "method.dempstershafer.mob": dempstershafer_mob,
                   "method.dempstershafer.cmfl": confusionSeed,
                   "pixType": pixType,
                   "out": fusion_path}
    return options
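# Usage sketch for compute_fusion_options (paths and run count are
# hypothetical): build majority-voting options for two runs. The resulting
# dictionary is meant to feed OtbAppBank.CreateFusionOfClassificationsApplication,
# as done in mergeFinalClassifications below.
def _example_compute_fusion_options():
    iota2_dir_final = "/XXX/final"  # hypothetical iota2 output directory
    final_classifications = [os.path.join(iota2_dir_final,
                                          "Classif_Seed_{}.tif".format(run))
                             for run in range(2)]
    fusion_path = os.path.join(iota2_dir_final, "Classifications_fusion.tif")
    options = compute_fusion_options(iota2_dir_final, final_classifications,
                                     "majorityvoting", undecidedlabel=255,
                                     dempstershafer_mob="precision",
                                     pixType="uint8", fusion_path=fusion_path)
    return options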
def getAll_regions(tileName, folder):
    """
    return the list of regions intersecting the given tile, deduced from the
    names of the 'learn' shapeFiles found in folder
    """
    allRegion = []
    allShape = fu.FileSearch_AND(folder, True, "learn", tileName, ".shp")
    for currentShape in allShape:
        currentRegion = currentShape.split("/")[-1].split("_")[2]
        if currentRegion not in allRegion:
            allRegion.append(currentRegion)
    return allRegion
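# getAll_regions assumes shapeFile names where the region is the third
# '_'-separated token; a hypothetical illustration (file names are
# assumptions, following the Samples_region_* convention seen elsewhere):
#
# >>> # folder contains T31TCJ_region_1_seed0_learn.shp
# >>> # and T31TCJ_region_2_seed0_learn.shp
# >>> getAll_regions("T31TCJ", "/XXX/dataRegion")
# ['1', '2']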
def getPaths(TileFolder, pattern):
    """
    return, for each sub-directory of TileFolder, the first file matching
    pattern
    """
    Tiles = os.listdir(TileFolder)
    paths = []
    for currentS2Tile in Tiles:
        if os.path.isdir(os.path.join(TileFolder, currentS2Tile)):
            stack = fu.FileSearch_AND(os.path.join(TileFolder, currentS2Tile),
                                      True, pattern)
            if stack:
                paths.append(stack[0])
    return paths
def step_clean(self):
    """
    remove temporary masks and, once the final regularized raster exists,
    the intermediate regularization file
    """
    from Common import FileUtils as fut

    for filetoremove in fut.FileSearch_AND(self.tmpdir, True, "mask", ".tif"):
        os.remove(filetoremove)

    if os.path.exists(os.path.join(self.outputPath, 'final', 'simplification',
                                   'classif_regul.tif')):
        os.remove(os.path.join(self.tmpdir, 'regul1.tif'))
def computeStats(pathConf, wD=None):
    dataField = Config(open(pathConf)).chain.dataField
    iota2Folder = Config(open(pathConf)).chain.outputPath
    runs = Config(open(pathConf)).chain.runs
    workingDirectory = iota2Folder + "/final/TMP"
    if wD:
        workingDirectory = wD

    statsBySeed = []
    for seed in range(runs):
        # Get sqlites (stats only on learnt polygons)
        dataBase = fut.FileSearch_AND(iota2Folder + "/final/TMP", True,
                                      ".sqlite", "extraction", "learn")
        # will contain all data bases
        finalDataBaseName = "statsDataBase_run_" + str(seed) + ".sqlite"
        finalDataBasePath = workingDirectory + "/" + finalDataBaseName
        if os.path.exists(finalDataBasePath):
            os.remove(finalDataBasePath)

        shutil.copy(dataBase[0], finalDataBasePath)
        del dataBase[0]

        fields = "GEOMETRY," + ",".join(
            fut.getAllFieldsInShape(finalDataBasePath, driver='SQLite'))
        conn = lite.connect(finalDataBasePath)
        cursor = conn.cursor()
        cursor.execute("select name from sqlite_master where type = 'table';")
        tableName = str(cursor.fetchall()[-1][0])

        print("Fill up statistics dataBase")
        for currentDataBase in dataBase:
            print("Add dataBase : {}".format(currentDataBase))
            cursor.execute("ATTACH '%s' as db2;" % (currentDataBase))
            cursor.execute("CREATE TABLE output2 AS SELECT * FROM db2.output;")
            cursor.execute("INSERT INTO " + tableName + "(" + fields +
                           ") SELECT " + fields + " FROM output2;")
            conn.commit()
            # drop the temporary table and detach the database, otherwise
            # the next iteration cannot re-use the 'db2' alias
            cursor.execute("DROP TABLE output2;")
            cursor.execute("DETACH db2;")
        conn = cursor = None

        conn = lite.connect(finalDataBasePath)
        cursor = conn.cursor()
        cleanSqliteDatabase(finalDataBasePath, "output2")

        # plot relation
        plotsSeed = plotRelation(finalDataBasePath, dataField, seed,
                                 iota2Folder)
        # Compute statistics
        print("Compute statistics")
        statsByClass = computeStatistics(finalDataBasePath, dataField)
        statsBySeed.append(statsByClass)
    return statsBySeed
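# The ATTACH/INSERT pattern used in computeStats, reduced to a runnable,
# self-contained sketch (table name 'output' follows the code above; the
# function name and the assumption that both files share the same schema
# are hypothetical):
def _example_sqlite_merge(main_db, other_db):
    """append the 'output' table of other_db into main_db's 'output' table"""
    import sqlite3
    conn = sqlite3.connect(main_db)
    cursor = conn.cursor()
    # attach the second file under an alias, copy its rows, then detach
    cursor.execute("ATTACH '%s' AS db2;" % other_db)
    cursor.execute("INSERT INTO output SELECT * FROM db2.output;")
    conn.commit()
    cursor.execute("DETACH db2;")
    conn.close()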
def step_inputs(self):
    """
    Return
    ------
    the return can be an iterable or a callable
    """
    from Common import FileUtils as fut
    listout = fut.FileSearch_AND(self.tmpdir, True, "mask", ".tif")
    return [listout]
def mergeSubVector(inpath, classes="", inbase="dept_", outbase="departement_"):
    """
    merge checked ('chk') sub-vectors by zone and apply iota2 formatting
    """
    listout = fut.FileSearch_AND(inpath, True, inbase, ".shp", "chk")
    listofchkofzones = fut.sortByFirstElem([("_".join(x.split('_')[:-1]), x)
                                            for x in listout])
    for zone in listofchkofzones:
        zoneval = zone[0].split('_')[-1]
        outfile = os.path.join(inpath, outbase + zoneval + '.shp')
        mf.mergeVectors(zone[1], outfile)
        iota2Formatting(outfile, classes, outfile)
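# fut.sortByFirstElem is assumed (from its uses here and in
# GetDataAugmentationByCopyParameters below) to group (key, value) pairs
# by key; a hypothetical illustration:
#
# >>> fut.sortByFirstElem([("dept_31", "a.shp"), ("dept_31", "b.shp"),
# ...                      ("dept_32", "c.shp")])
# [("dept_31", ["a.shp", "b.shp"]), ("dept_32", ["c.shp"])]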
def createRegionsByTiles(shapeRegion, field_Region, pathToEnv, pathOut,
                         pathWd, logger_=logger):
    """
    create a shapeFile into tile's envelope for each region in shapeRegion
    and for each tile

    IN :
        - shapeRegion : the shape which contains all regions
        - field_Region : the field in the region's shape which describes
                         which tile belongs to which model
        - pathToEnv : path to the tile's envelope with priority
        - pathOut : path to store all resulting shapeFiles
        - pathWd : path to working directory (not mandatory, due to
                   cluster's architecture. default = None)
    """
    pathName = pathWd
    if pathWd is None:
        # sequential case
        pathName = pathOut

    # get all tiles
    AllTiles = fu.FileSearch_AND(pathToEnv, True, ".shp")
    regionList = fu.getFieldElement(shapeRegion, "ESRI Shapefile",
                                    field_Region, "unique")
    shpRegionList = splitVectorLayer(shapeRegion, field_Region, "int",
                                     regionList, pathName)
    AllClip = []
    for shp in shpRegionList:
        for tile in AllTiles:
            logger_.info("Extract %s in %s", shp, tile)
            pathToClip = fu.ClipVectorData(shp, tile, pathName)
            AllClip.append(pathToClip)

    if pathWd:
        for clip in AllClip:
            cmd = "cp " + clip.replace(".shp", "*") + " " + pathOut
            run(cmd)
    else:
        for shp in shpRegionList:
            path = shp.replace(".shp", "")
            os.remove(path + ".shp")
            os.remove(path + ".shx")
            os.remove(path + ".dbf")
            os.remove(path + ".prj")
    return AllClip
def prepareAnnualFeatures(workingDirectory, referenceDirectory, pattern,
                          rename=None):
    """
    double all rasters' pixel values
    rename must be a tuple (old_pattern, new_pattern)
    """
    def rename_path(path):
        # reproduce the reference tree in the working directory, applying
        # the optional rename pattern (the original code crashed when
        # rename was None)
        out = path.replace(referenceDirectory, workingDirectory)
        if rename:
            out = out.replace(rename[0], rename[1])
        return out

    for dirname, dirnames, filenames in os.walk(referenceDirectory):
        # recreate every sub-directory in the working directory
        for subdirname in dirnames:
            os.mkdir(rename_path(os.path.join(dirname, subdirname)))
        # copy every file into the working directory
        for filename in filenames:
            shutil.copy(os.path.join(dirname, filename),
                        rename_path(os.path.join(dirname, filename)))

    rastersPath = fut.FileSearch_AND(workingDirectory, True, pattern)
    for raster in rastersPath:
        cmd = ('otbcli_BandMathX -il ' + raster + ' -out ' + raster +
               ' -exp "im1+im1"')
        print(cmd)
        os.system(cmd)

    if rename:
        all_content = []
        for dirname, dirnames, filenames in os.walk(workingDirectory):
            # collect all sub-directories
            for subdirname in dirnames:
                all_content.append(os.path.join(dirname, subdirname))
            # collect all files
            for filename in filenames:
                all_content.append(os.path.join(dirname, filename))
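# Usage sketch for prepareAnnualFeatures (directory names, the pattern and
# the rename pair are hypothetical): duplicate a reference feature tree and
# double the pixel values of every raster matching the pattern.
def _example_prepare_annual_features():
    prepareAnnualFeatures(workingDirectory="/XXX/annualFeatures",
                          referenceDirectory="/XXX/features",
                          pattern="LAI.tif",
                          rename=("SAR", "SAR_annual"))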
def genResults(pathRes, pathNom):
    """
    generate IOTA² final report
    """
    from Validation import ResultsUtils as resU

    all_csv = fu.FileSearch_AND(pathRes + "/TMP", True, "Classif", ".csv")
    resU.stats_report(all_csv, pathNom, os.path.join(pathRes, "RESULTS.txt"))

    for seed_csv in all_csv:
        name, ext = os.path.splitext(os.path.basename(seed_csv))
        out_png = os.path.join(pathRes, "Confusion_Matrix_{}.png".format(name))
        resU.gen_confusion_matrix_fig(seed_csv, out_png, pathNom,
                                      undecidedlabel=None, dpi=200,
                                      write_conf_score=True, grid_conf=True,
                                      conf_score="count_sci", threshold=0.1)
def GetDataAugmentationByCopyParameters(iota2_dir_samples):
    """
    parse the IOTA2's */learningSamples* directory and return a list of
    samples grouped by seed, in order to feed the DataAugmentationByCopy
    function

    Parameters
    ----------
    iota2_dir_samples : string
        absolute path to the /learningSamples IOTA2's directory

    Example
    -------
    >>> os.listdir("/learningSamples")
    ["Samples_region_2_seed0_learn.sqlite",
     "Samples_region_2_seed1_learn.sqlite",
     "Samples_region_1_seed0_learn.sqlite",
     "Samples_region_1_seed1_learn.sqlite"]
    >>> GetDataAugmentationByCopyParameters("/learningSamples")
    [[Samples_region_1_seed0_learn.sqlite, Samples_region_2_seed0_learn.sqlite],
     [Samples_region_1_seed1_learn.sqlite, Samples_region_2_seed1_learn.sqlite]]

    Return
    ------
    list
        a list of lists where each inner list contains all samples of a
        given run
    """
    seed_pos = 3
    samples_set = [(os.path.basename(samples).split("_")[seed_pos], samples)
                   for samples in fut.FileSearch_AND(iota2_dir_samples, True,
                                                     "Samples_region",
                                                     "sqlite")]
    samples_set = [samplesSeed
                   for seed, samplesSeed in fut.sortByFirstElem(samples_set)]
    return samples_set
def extractStats(vectorIn, pathConf, wD=None):
    dataField = Config(open(pathConf)).chain.dataField
    iota2Folder = Config(open(pathConf)).chain.outputPath
    tileToCompute = vectorIn.split("/")[-1].split("_")[0]
    modelToCompute = vectorIn.split("/")[-1].split("_")[2].split("f")[0]
    seed = vectorIn.split("/")[-1].split("_")[3].replace("seed", "")
    workingDirectory = iota2Folder + "/final/TMP"
    # 'learn' or 'val'
    shapeMode = vectorIn.split("/")[-1].split("_")[-1].split(".")[0]
    if wD:
        workingDirectory = wD

    try:
        refImg = fut.FileSearch_AND(iota2Folder + "/final/TMP", True,
                                    tileToCompute, ".tif")[0]
    except IndexError:
        raise Exception("reference image can not be found in " + iota2Folder +
                        "/final/TMP")

    statsFile = (workingDirectory + "/" + tileToCompute + "_stats_model_" +
                 modelToCompute + ".xml")
    stats = otbApp.CreatePolygonClassStatisticsApplication({"in": refImg,
                                                            "vec": vectorIn,
                                                            "out": statsFile,
                                                            "field": dataField})
    stats.ExecuteAndWriteOutput()

    selVector = (workingDirectory + "/" + tileToCompute + "_selection_model_" +
                 modelToCompute + ".sqlite")
    sampleS = otbApp.CreateSampleSelectionApplication({"in": refImg,
                                                       "vec": vectorIn,
                                                       "out": selVector,
                                                       "instats": statsFile,
                                                       "strategy": "all",
                                                       "field": dataField})
    sampleS.ExecuteAndWriteOutput()

    classificationRaster = fut.FileSearch_AND(iota2Folder + "/final/TMP", True,
                                              tileToCompute + "_seed_" +
                                              seed + ".tif")[0]
    validity = fut.FileSearch_AND(iota2Folder + "/final/TMP", True,
                                  tileToCompute + "_Cloud.tif")[0]
    confiance = fut.FileSearch_AND(iota2Folder + "/final/TMP", True,
                                   tileToCompute + "_GlobalConfidence_seed_" +
                                   seed + ".tif")[0]
    stack = [classificationRaster, validity, confiance]

    dataStack = otbApp.CreateConcatenateImagesApplication({"il": stack,
                                                           "ram": '1000',
                                                           "pixType": "uint8",
                                                           "out": ""})
    dataStack.Execute()

    outSampleExtraction = (workingDirectory + "/" + tileToCompute +
                           "_extraction_model_" + modelToCompute + "_" +
                           shapeMode + ".sqlite")
    # note: the original code passed the output key as " out" (leading
    # space), which OTB would not recognize
    extraction = otbApp.CreateSampleExtractionApplication(
        {"in": dataStack,
         "vec": selVector,
         "field": dataField,
         "out": outSampleExtraction,
         "outfield": "list",
         "outfield.list.names": ["predictedClass", "validity", "confidence"]})
    extraction.ExecuteAndWriteOutput()

    conn = lite.connect(outSampleExtraction)
    cursor = conn.cursor()
    cursor.execute("alter table output add column TILE TEXT")
    cursor.execute("update output set TILE='" + tileToCompute + "'")
    cursor.execute("alter table output add column MODEL TEXT")
    cursor.execute("update output set MODEL='" + modelToCompute + "'")
    conn.commit()

    os.remove(statsFile)
    os.remove(selVector)
    if wD:
        shutil.copy(outSampleExtraction, iota2Folder + "/final/TMP")
def S1PreProcess(cfg, process_tile, workingDirectory=None, getFiltered=False):
    """
    IN
    cfg [string] : path to a configuration file
    process_tile [list] : list of tiles to be processed
    workingDirectory [string] : path to a working directory

    OUT [list of OTB applications needed to filter SAR images]
    allFiltered, allDependence, allMasksOut, allTile
    """
    import multiprocessing
    from functools import partial
    import configparser
    from . import S1FileManager
    from Common import FileUtils as fut

    if process_tile and not isinstance(process_tile, list):
        process_tile = [process_tile]

    config = configparser.ConfigParser()
    config.read(cfg)
    wMode = ast.literal_eval(config.get('Processing', 'writeTemporaryFiles'))
    wMasks = ast.literal_eval(config.get('Processing', 'getMasks'))
    stackFlag = ast.literal_eval(config.get('Processing', 'outputStack'))
    RAMPerProcess = int(config.get('Processing', 'RAMPerProcess'))

    S1chain = Sentinel1_PreProcess(cfg)
    s1_file_manager = S1FileManager.S1FileManager(cfg)
    try:
        fMode = config.get('Processing', 'FilteringMode')
    except (configparser.NoSectionError, configparser.NoOptionError):
        fMode = "multi"

    convert_to_integer = False
    tilesToProcess = [cTile[1:] for cTile in process_tile]

    if len(tilesToProcess) == 0:
        print("No existing tiles found, exiting ...")
        sys.exit(1)

    # Analyse SRTM coverage for MGRS tiles to be processed
    srtm_tiles_check = s1_file_manager.checkSRTMCoverage(tilesToProcess)

    needed_srtm_tiles = []
    tilesToProcessChecked = []
    # For each MGRS tile to process
    for tile in tilesToProcess:
        # Get SRTM tiles coverage statistics
        srtm_tiles = srtm_tiles_check[tile]
        current_coverage = 0
        current_needed_srtm_tiles = []
        # Compute global coverage
        for (srtm_tile, coverage) in srtm_tiles:
            current_needed_srtm_tiles.append(srtm_tile)
            current_coverage += coverage
        # If SRTM coverage of the MGRS tile is complete, process it
        if current_coverage >= 1.:
            needed_srtm_tiles += current_needed_srtm_tiles
            tilesToProcessChecked.append(tile)
        else:
            # Skip it
            print("Tile " + str(tile) + " has insufficient SRTM coverage (" +
                  str(100 * current_coverage) +
                  "%), it will not be processed")

    # Remove duplicates
    needed_srtm_tiles = list(set(needed_srtm_tiles))

    if len(tilesToProcessChecked) == 0:
        print("No tiles to process, exiting ...")
        sys.exit(1)

    print("Required SRTM tiles: " + str(needed_srtm_tiles))

    srtm_ok = True
    for srtm_tile in needed_srtm_tiles:
        tile_path = os.path.join(S1chain.SRTM, srtm_tile)
        if not os.path.exists(tile_path):
            srtm_ok = False
            print(tile_path + " is missing")
    if not srtm_ok:
        print("Some SRTM tiles are missing, exiting ...")
        sys.exit(1)

    if not os.path.exists(S1chain.geoid):
        print("Geoid file does not exist (" + S1chain.geoid +
              "), exiting ...")
        sys.exit(1)

    tilesSet = list(tilesToProcessChecked)
    rasterList = [elem for elem, coordinates in
                  s1_file_manager.getS1IntersectByTile(tilesSet[0])]

    comp_per_date = 2  # VV / VH
    tile = tilesToProcessChecked[0]
    allMasks = []

    if workingDirectory:
        workingDirectory = os.path.join(workingDirectory, tile)
        if not os.path.exists(workingDirectory):
            try:
                os.mkdir(workingDirectory)
            except OSError:
                pass

    refRaster = fut.FileSearch_AND(S1chain.referencesFolder + "/T" + tile,
                                   True, S1chain.rasterPattern)[0]

    # get SAR rasters which intersect the tile
    rasterList = s1_file_manager.getS1IntersectByTile(tile)

    # split SAR rasters into different groups
    (rasterList_s1aASC, rasterList_s1aDES,
     rasterList_s1bASC, rasterList_s1bDES) = splitByMode(rasterList)

    # get detected dates by acquisition mode
    s1_ASC_dates = getSARDates(rasterList_s1aASC + rasterList_s1bASC)
    s1_DES_dates = getSARDates(rasterList_s1aDES + rasterList_s1bDES)

    # find which ones have to be concatenated (acquisition dates are the same)
    rasterList_s1aASC = concatenateDates(rasterList_s1aASC)
    rasterList_s1aDES = concatenateDates(rasterList_s1aDES)
    rasterList_s1bASC = concatenateDates(rasterList_s1bASC)
    rasterList_s1bDES = concatenateDates(rasterList_s1bDES)

    output_directory = os.path.join(S1chain.outputPreProcess, tile)
    if not os.path.exists(output_directory):
        try:
            os.mkdir(output_directory)
        except OSError:
            print("{} already exists".format(output_directory))

    LaunchSARreprojection_prod = partial(LaunchSARreprojection,
                                         refRaster=refRaster,
                                         tileName=tile,
                                         geoid=S1chain.geoid,
                                         SRTM=S1chain.SRTM,
                                         output_directory=output_directory,
                                         RAMPerProcess=RAMPerProcess,
                                         workingDirectory=workingDirectory)

    rasterList_s1aASC_reproj = []
    p = multiprocessing.Pool(1)
    rasterList_s1aASC_reproj.append(p.map(LaunchSARreprojection_prod,
                                          rasterList_s1aASC))
    p.terminate()
    p.join()

    rasterList_s1aDES_reproj = []
    p = multiprocessing.Pool(1)
    rasterList_s1aDES_reproj.append(p.map(LaunchSARreprojection_prod,
                                          rasterList_s1aDES))
    p.terminate()
    p.join()

    rasterList_s1bASC_reproj = []
    p = multiprocessing.Pool(1)
    rasterList_s1bASC_reproj.append(p.map(LaunchSARreprojection_prod,
                                          rasterList_s1bASC))
    p.terminate()
    p.join()

    rasterList_s1bDES_reproj = []
    p = multiprocessing.Pool(1)
    rasterList_s1bDES_reproj.append(p.map(LaunchSARreprojection_prod,
                                          rasterList_s1bDES))
    p.terminate()
    p.join()

    rasterList_s1aASC_reproj_flat = [pol
                                     for SAR_date in rasterList_s1aASC_reproj[0]
                                     for pol in SAR_date]
    rasterList_s1aDES_reproj_flat = [pol
                                     for SAR_date in rasterList_s1aDES_reproj[0]
                                     for pol in SAR_date]
    rasterList_s1bASC_reproj_flat = [pol
                                     for SAR_date in rasterList_s1bASC_reproj[0]
                                     for pol in SAR_date]
    rasterList_s1bDES_reproj_flat = [pol
                                     for SAR_date in rasterList_s1bDES_reproj[0]
                                     for pol in SAR_date]

    allOrtho_path = (rasterList_s1aASC_reproj_flat +
                     rasterList_s1aDES_reproj_flat +
                     rasterList_s1bASC_reproj_flat +
                     rasterList_s1bDES_reproj_flat)

    s1aASC_masks = [s1aASC.replace(".tif", "_BorderMask.tif")
                    for s1aASC in rasterList_s1aASC_reproj_flat
                    if "_vv_" in s1aASC]
    s1aDES_masks = [s1aDES.replace(".tif", "_BorderMask.tif")
                    for s1aDES in rasterList_s1aDES_reproj_flat
                    if "_vv_" in s1aDES]
    s1bASC_masks = [s1bASC.replace(".tif", "_BorderMask.tif")
                    for s1bASC in rasterList_s1bASC_reproj_flat
                    if "_vv_" in s1bASC]
    s1bDES_masks = [s1bDES.replace(".tif", "_BorderMask.tif")
                    for s1bDES in rasterList_s1bDES_reproj_flat
                    if "_vv_" in s1bDES]

    allMasks = s1aASC_masks + s1aDES_masks + s1bASC_masks + s1bDES_masks

    date_tile = {'s1_ASC': s1_ASC_dates,
                 's1_DES': s1_DES_dates}

    # sort detected dates
    for k, v in list(date_tile.items()):
        v.sort()

    # launch outcore generation and prepare multitemporal filtering
    filtered = S1FilteringProcessor.main(allOrtho_path, cfg, date_tile, tile)

    allFiltered = []
    allMasksOut = []
    for S1_filtered, a, b in filtered:
        if convert_to_integer:
            # NB: this branch is never taken here (convert_to_integer is
            # always False above) and 'mode' is not defined in this scope
            S1_filtered.Execute()
            convert = SAR_floatToInt(S1_filtered,
                                     comp_per_date * len(date_tile[mode]),
                                     RAMPerProcess)
            allFiltered.append(convert)
        else:
            allFiltered.append(S1_filtered)
        allMasksOut.append(allMasks)

    # In order to avoid "TypeError: can't pickle SwigPyObject objects"
    if getFiltered:
        return allFiltered, allMasksOut
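# Usage sketch for S1PreProcess (configuration path and tile name are
# hypothetical): pre-process all S1 acquisitions intersecting one MGRS tile
# and get back the filtering applications and border masks.
def _example_s1_preprocess():
    all_filtered, all_masks = S1PreProcess("/XXX/S1Processor.cfg",
                                           ["T31TCJ"],
                                           workingDirectory="/tmp",
                                           getFiltered=True)
    return all_filtered, all_masks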
def mergeFinalClassifications(iota2_dir,
                              dataField,
                              nom_path,
                              colorFile,
                              runs=1,
                              pixType='uint8',
                              method="majorityvoting",
                              undecidedlabel=255,
                              dempstershafer_mob="precision",
                              keep_runs_results=True,
                              enableCrossValidation=False,
                              validationShape=None,
                              workingDirectory=None,
                              logger=logger):
    """function used to merge classifications by the majorityvoting or
    dempstershafer method and to evaluate the result.

    get all classifications Classif_Seed_*.tif in the /final directory,
    fuse them into the raster called Classifications_fusion.tif, then
    compute statistics using the results_utils library

    Parameters
    ----------
    iota2_dir : string
        path to the iota2's output path
    dataField : string
        data's field name
    nom_path : string
        path to the nomenclature file
    colorFile : string
        path to the color file description
    runs : int
        number of iota2 runs (random learning splits)
    pixType : string
        output pixel format (available in OTB)
    method : string
        fusion's method (majorityvoting/dempstershafer)
    undecidedlabel : int
        label for undecided pixels
    dempstershafer_mob : string
        mass of belief measurement (precision/recall/accuracy/kappa)
    keep_runs_results : bool
        flag to inform if the seeds' results must be kept (copied before
        being overwritten)
    enableCrossValidation : bool
        flag to inform if cross validation is enabled
    validationShape : string
        path to a shape dedicated to validate the fusion of classifications
    workingDirectory : string
        path to a working directory

    See Also
    --------
    results_utils.gen_confusion_matrix_fig
    results_utils.stats_report
    """
    import shutil
    from Common import OtbAppBank as otbApp
    from Validation import ResultsUtils as ru
    from Common import CreateIndexedColorImage as color

    fusion_name = "Classifications_fusion.tif"
    new_results_seed_file = "RESULTS_seeds.txt"
    fusion_vec_name = "fusion_validation"  # without extension
    confusion_matrix_name = "fusionConfusion.png"

    if method not in ["majorityvoting", "dempstershafer"]:
        err_msg = "the fusion method must be 'majorityvoting' or 'dempstershafer'"
        logger.error(err_msg)
        raise Exception(err_msg)
    if dempstershafer_mob not in ["precision", "recall", "accuracy", "kappa"]:
        err_msg = "the dempstershafer MoB must be 'precision' or 'recall' or 'accuracy' or 'kappa'"
        logger.error(err_msg)
        raise Exception(err_msg)

    iota2_dir_final = os.path.join(iota2_dir, "final")
    wd = iota2_dir_final
    wd_merge = os.path.join(iota2_dir_final, "merge_final_classifications")
    if workingDirectory:
        wd = workingDirectory
        wd_merge = workingDirectory

    final_classifications = [fut.FileSearch_AND(iota2_dir_final, True,
                                                "Classif_Seed_{}.tif".format(run))[0]
                             for run in range(runs)]
    fusion_path = os.path.join(wd, fusion_name)

    fusion_options = compute_fusion_options(iota2_dir_final,
                                            final_classifications, method,
                                            undecidedlabel,
                                            dempstershafer_mob, pixType,
                                            fusion_path)
    logger.debug("fusion options:")
    logger.debug(fusion_options)
    fusion_app = otbApp.CreateFusionOfClassificationsApplication(fusion_options)
    logger.debug("START fusion of final classifications")
    fusion_app.ExecuteAndWriteOutput()
    logger.debug("END fusion of final classifications")

    fusion_color_index = color.CreateIndexedColorImage(
        fusion_path,
        colorFile,
        co_option=["COMPRESS=LZW"],
        output_pix_type=gdal.GDT_Byte if pixType == "uint8" else gdal.GDT_UInt16)

    confusion_matrix = os.path.join(iota2_dir_final,
                                    "merge_final_classifications",
                                    "confusion_mat_maj_vote.csv")
    if enableCrossValidation is False:
        vector_val = fut.FileSearch_AND(os.path.join(iota2_dir_final,
                                                     "merge_final_classifications"),
                                        True, "_majvote.sqlite")
    else:
        vector_val = fut.FileSearch_AND(os.path.join(iota2_dir, "dataAppVal"),
                                        True, "val.sqlite")
    if validationShape:
        validation_vector = validationShape
    else:
        fut.mergeSQLite(fusion_vec_name, wd_merge, vector_val)
        validation_vector = os.path.join(wd_merge,
                                         fusion_vec_name + ".sqlite")

    confusion = otbApp.CreateComputeConfusionMatrixApplication(
        {"in": fusion_path,
         "out": confusion_matrix,
         "ref": "vector",
         "ref.vector.nodata": "0",
         "ref.vector.in": validation_vector,
         "ref.vector.field": dataField.lower(),
         "nodatalabel": "0",
         "ram": "5000"})
    confusion.ExecuteAndWriteOutput()

    maj_vote_conf_mat = os.path.join(iota2_dir_final, confusion_matrix_name)
    ru.gen_confusion_matrix_fig(csv_in=confusion_matrix,
                                out_png=maj_vote_conf_mat,
                                nomenclature_path=nom_path,
                                undecidedlabel=undecidedlabel,
                                dpi=900)

    if keep_runs_results:
        seed_results = fut.FileSearch_AND(iota2_dir_final, True,
                                          "RESULTS.txt")[0]
        shutil.copy(seed_results, os.path.join(iota2_dir_final,
                                               new_results_seed_file))

    maj_vote_report = os.path.join(iota2_dir_final, "RESULTS.txt")
    ru.stats_report(csv_in=[confusion_matrix],
                    nomenclature_path=nom_path,
                    out_report=maj_vote_report,
                    undecidedlabel=undecidedlabel)

    if workingDirectory:
        shutil.copy(fusion_path, iota2_dir_final)
        shutil.copy(fusion_color_index, iota2_dir_final)
        os.remove(fusion_path)