def pass1(self): logging.info("Start pass 1") # Pass1 : NDSI threshold ndsi_formula = "(im1b" + str(self.nGreen) + "-im1b" + str(self.nSWIR) + \ ")/(im1b" + str(self.nGreen) + "+im1b" + str(self.nSWIR) + ")" logging.info("ndsi formula: " + ndsi_formula) # NDSI condition (ndsi > x and not cloud) condition_ndsi = "(im2b1!=1 and (" + ndsi_formula + ")>" + str( self.ndsi_pass1) + " " condition_pass1 = condition_ndsi + \ " and (im1b" + str(self.nRed) + "> " + str(self.rRed_pass1) + \ ") and (im1b" + str(self.nSWIR) + "< " + str(self.swir_pass) + "))" bandMathPass1 = band_math([self.img, self.all_cloud_path], self.pass1_path + GDAL_OPT, condition_pass1 + "?1:0", self.ram, otb.ImagePixelType_uint8) bandMathPass1.ExecuteAndWriteOutput() bandMathPass1 = None # create a working copy of all cloud mask shutil.copy(self.all_cloud_path, self.cloud_pass1_path) # apply pass 1.5 to discard uncertain snow area # warn this function update in-place both snow and cloud mask if self.rm_snow_inside_cloud: self.pass1_5(self.pass1_path, self.cloud_pass1_path, self.dilation_radius, self.cloud_threshold, self.cloud_min_area_size) # The computation of cloud refine is done below, # because the inital cloud may be updated within pass1_5 # Refine cloud mask for snow detection cond_cloud2 = "im3b1>" + str(self.rRed_darkcloud) # this condition check if pass1_5 caused a cloud mask update condition_donuts = "(im1b1!=im5b1)" condition_shadow = "((im1b1==1 and " + cond_cloud2 + \ ") or im2b1==1 or im4b1==1 or " + condition_donuts + ")" logging.info(condition_shadow) bandMathFinalShadow = band_math([ self.all_cloud_path, op.join(self.path_tmp, "shadow_mask.tif"), op.join(self.path_tmp, "red_nn.tif"), op.join(self.path_tmp, "high_cloud_mask.tif"), self.cloud_pass1_path ], self.cloud_refine_path + GDAL_OPT, condition_shadow, self.ram, otb.ImagePixelType_uint8) bandMathFinalShadow.ExecuteAndWriteOutput() logging.info("End of pass 1")
def merge_masks_at_same_date(snow_product_list,
                             merged_snow_product,
                             threshold=100,
                             ram=None):
    """Fuse several snow masks acquired at the same date into one raster.

    Keyword arguments:
    snow_product_list -- the input mask list (ordered: main products first,
                         then densification products)
    merged_snow_product -- the output filepath
    threshold -- maximum pixel value taken as valid data (inclusive)
    ram -- the ram limitation (not mandatory)
    """
    logging.info("Merging products into " + merged_snow_product)

    # Build a nested conditional BandMath expression:
    #   (im1b1<=t ? im1b1 : (im2b1<=t ? im2b1 : ... imNb1)...)
    # Each product in turn supplies its pixel when below the threshold;
    # the last product is the unconditional fallback.
    nb_products = len(snow_product_list)
    expression_merging = ""
    for position in range(1, nb_products + 1):
        expression_merging += ("(im" + str(position) + "b1<=" +
                               str(threshold) + "?im" + str(position) + "b1:")
    expression_merging += "im" + str(nb_products) + "b1"
    expression_merging += ")" * nb_products

    img_list = [product.get_snow_mask() for product in snow_product_list]

    bandMathApp = band_math(img_list, merged_snow_product,
                            expression_merging, ram,
                            otb.ImagePixelType_uint8)
    bandMathApp.ExecuteAndWriteOutput()
    bandMathApp = None
def extract_all_clouds(self):
    """Extract the all-cloud mask into self.all_cloud_path.

    lasrc mode uses the ComputeCloudMask application; sen2cor decodes the
    SCL labels; any other mode treats every non-zero cloud-mask pixel as
    cloud.
    """
    if self.mode == 'lasrc':
        # Extract shadow wich corresponds to all cloud shadows in larsc product
        logging.info(
            "lasrc mode -> extract all clouds from LASRC product using ComputeCloudMask application..."
        )
        computeCMApp = compute_cloud_mask(self.cloud_init,
                                          self.all_cloud_path + GDAL_OPT,
                                          str(self.all_cloud_mask),
                                          self.ram,
                                          otb.ImagePixelType_uint8)
        computeCMApp.ExecuteAndWriteOutput()
        computeCMApp = None
    else:
        if self.mode == 'sen2cor':
            logging.info(
                "sen2cor mode -> extract all clouds from SCL layer...")
            logging.info("All clouds in sen2cor SCL layer corresponds to:")
            logging.info("- label == 3 -> Cloud shadows")
            logging.info("- label == 8 -> Cloud medium probability")
            logging.info("- label == 9 -> Cloud high probability")
            logging.info("- label == 10 -> Thin cirrus")
            # Medium-probability clouds (label 8) are only kept when the
            # SWIR reflectance exceeds swir_pass.
            condition_all_clouds = "im1b1==3 || (im1b1==8 and (im2b" + str(
                self.nSWIR) + " > " + str(
                    self.swir_pass) + ")) || im1b1==9 || im1b1==10"
        else:
            condition_all_clouds = "im1b1 > 0"

        # FIX: parenthesize the whole condition before the "> 0" comparison.
        # The previous expression appended " > 0" to an un-parenthesized
        # "||" chain (and produced the chained comparison "im1b1 > 0 > 0"
        # in the generic branch), relying on parser precedence; the value
        # is unchanged but the parse is now unambiguous.
        bandMathAllCloud = band_math(
            [self.cloud_init, self.img], self.all_cloud_path + GDAL_OPT,
            "((" + condition_all_clouds + ") > 0)?1:0", self.ram,
            otb.ImagePixelType_uint8)
        bandMathAllCloud.ExecuteAndWriteOutput()
        bandMathAllCloud = None
def pass3(self):
    """Fuse the pass1 and pass2 snow masks (logical OR) into pass3."""
    # A pixel is flagged as snow when either pass detected it.
    fusion_expression = "(im1b1 == 1 or im2b1 == 1)" + "?1:0"
    fusion_app = band_math([self.pass1_path, self.pass2_path],
                           self.pass3_path + GDAL_OPT, fusion_expression,
                           self.ram, otb.ImagePixelType_uint8)
    fusion_app.ExecuteAndWriteOutput()
def extract_binary_mask(self, mask_in, mask_out, expression, mask_format=""):
    """Evaluate *expression* on *mask_in* and write a uint8 mask.

    mask_format is appended to the output path as writer options;
    returns mask_out (without the format suffix).
    """
    destination = mask_out + mask_format
    extraction_app = band_math([mask_in], destination, expression,
                               self.ram, otb.ImagePixelType_uint8)
    extraction_app.ExecuteAndWriteOutput()
    return mask_out
def detect_snow(self, nbPass):
    """Run the snow detection chain.

    nbPass -- number of passes to run (0, 1 or 2); pass2 itself triggers
    pass3 when applicable. Afterwards builds the RGB composition, optional
    vectorization, burned polygon edges and the product metadata.
    """
    # Set maximum ITK threads
    if self.nbThreads:
        os.environ["ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS"] = str(
            self.nbThreads)

    # External preprocessing
    if self.do_preprocessing:
        # Declare a pout dem in the output directory
        pout_resampled_dem = op.join(self.path_tmp, "dem_resampled.tif")
        build_dem(self.dem, self.img, pout_resampled_dem, self.ram,
                  self.nbThreads)
        # Change self.dem to use the resampled DEM (output of build_dem) in this case
        self.dem = pout_resampled_dem

    # Initialize the mask
    # 1 where the image equals the nodata value, 0 elsewhere.
    noDataMaskExpr = "im1b1==" + str(self.nodata) + "?1:0"
    bandMath = band_math([self.img], self.nodata_path, noDataMaskExpr,
                         self.ram)
    bandMath.ExecuteAndWriteOutput()
    bandMath = None

    if nbPass >= 0:
        self.pass0()
    if nbPass >= 1:
        self.pass1()
    if nbPass == 2:
        self.pass2()

    # RGB composition
    composition_RGB(self.img, self.composition_path, self.nSWIR, self.nRed,
                    self.nGreen, self.multi)

    # Gdal polygonize (needed to produce composition)
    # TODO: Study possible loss and issue with vectorization product
    if self.generate_vector:
        polygonize(self.final_mask_path, self.final_mask_path,
                   self.final_mask_vec_path, self.use_gdal_trace_outline,
                   self.gdal_trace_outline_min_area,
                   self.gdal_trace_outline_dp_toler)

    # Burn polygons edges on the composition
    # TODO add pass1 snow polygon in yellow
    burn_polygons_edges(self.composition_path, self.final_mask_path,
                        self.label_snow, self.label_cloud, self.ram)

    # Product formating
    #~ format_SEB_VEC_values(self.final_mask_vec_path,
    #~ self.label_snow,
    #~ self.label_cloud,
    #~ self.label_no_data)
    self.create_metadata()
def extract_cloud_shadows(self):
    """Build the cloud shadow mask (shadow_mask.tif) in the tmp directory.

    sen2cor: shadows are SCL label 3. Otherwise: OR of two masks decoded
    from the initial cloud product (shadows of clouds inside and outside
    the image).
    """
    shadow_mask_path = op.join(self.path_tmp, "shadow_mask.tif") + GDAL_OPT

    # Extract shadow masks differently if sen2cor or MAJA
    if self.mode == 'sen2cor':
        logging.info(
            "sen2cor mode -> extract all clouds from SCL layer...")
        logging.info("- label == 3 -> Cloud shadows")

        shadow_app = band_math([self.cloud_init], shadow_mask_path,
                               "(im1b1 == 3)", self.ram,
                               otb.ImagePixelType_uint8)
        shadow_app.ExecuteAndWriteOutput()
        shadow_app = None
    else:
        shadow_in_path = op.join(self.path_tmp, "shadow_in_mask.tif")
        shadow_out_path = op.join(self.path_tmp, "shadow_out_mask.tif")

        # First extract shadow wich corresponds to shadow of clouds inside the
        # image
        inner_shadow_app = compute_cloud_mask(self.cloud_init,
                                              shadow_in_path + GDAL_OPT,
                                              str(self.shadow_in_mask),
                                              self.ram,
                                              otb.ImagePixelType_uint8)
        inner_shadow_app.ExecuteAndWriteOutput()
        inner_shadow_app = None

        # Then extract shadow mask of shadows from clouds outside the image
        outer_shadow_app = compute_cloud_mask(self.cloud_init,
                                              shadow_out_path + GDAL_OPT,
                                              str(self.shadow_out_mask),
                                              self.ram,
                                              otb.ImagePixelType_uint8)
        outer_shadow_app.ExecuteAndWriteOutput()
        outer_shadow_app = None

        # The output shadow mask corresponds to a OR logic between the 2 shadow
        # masks
        shadow_app = band_math([shadow_in_path, shadow_out_path],
                               shadow_mask_path,
                               "(im1b1 == 1) || (im2b1 == 1)", self.ram,
                               otb.ImagePixelType_uint8)
        shadow_app.ExecuteAndWriteOutput()
        shadow_app = None
def extract_high_clouds(self):
    """Build the high cloud mask (high_cloud_mask.tif) in the tmp directory.

    sen2cor: thin cirrus is SCL label 10. Otherwise the mask is decoded
    from the initial cloud product with the configured high_cloud_mask
    value.
    """
    high_clouds_mask_path = op.join(self.path_tmp,
                                    "high_cloud_mask.tif") + GDAL_OPT

    if self.mode == 'sen2cor':
        logging.info(
            "sen2cor mode -> extract all clouds from SCL layer...")
        logging.info("- label == 10 -> Thin cirrus")

        high_cloud_app = band_math([self.cloud_init],
                                   high_clouds_mask_path, "(im1b1 == 10)",
                                   self.ram, otb.ImagePixelType_uint8)
    else:
        high_cloud_app = compute_cloud_mask(self.cloud_init,
                                            high_clouds_mask_path,
                                            str(self.high_cloud_mask),
                                            self.ram,
                                            otb.ImagePixelType_uint8)

    # Both branches only configure the application; execute it once here.
    high_cloud_app.ExecuteAndWriteOutput()
    high_cloud_app = None
def extract_backtocloud_mask(self):
    """Build the 'back to cloud' mask: cloudy pixels whose red reflectance
    exceeds the rRed_backtocloud threshold. Writes self.mask_backtocloud.
    """
    cloud_mask_for_backtocloud = self.cloud_init

    if self.mode == 'sen2cor':
        logging.info(
            "sen2cor mode -> extract all clouds from SCL layer...")
        logging.info("All clouds in sen2cor SCL layer corresponds to:")
        logging.info("- label == 3 -> Cloud shadows")
        logging.info("- label == 8 -> Cloud medium probability")
        logging.info("- label == 9 -> Cloud high probability")
        logging.info("- label == 10 -> Thin cirrus")
        # NOTE(review): label 11 is also included in the condition below
        # although it is not listed in the log messages — confirm intended.
        condition_all_clouds = "im1b1==3 || im1b1==8 || im1b1==9 || im1b1==10 || im1b1==11"
    else:
        condition_all_clouds = "im1b1 > 0"

    # im1 = cloud mask, im2 = red band.
    condition_back_to_cloud = ("(" + condition_all_clouds +
                               ") and (im2b1 > " +
                               str(self.rRed_backtocloud) + ")")

    backtocloud_app = band_math(
        [cloud_mask_for_backtocloud, self.redBand_path],
        self.mask_backtocloud + GDAL_OPT,
        condition_back_to_cloud + "?1:0", self.ram,
        otb.ImagePixelType_uint8)
    backtocloud_app.ExecuteAndWriteOutput()
def pass2(self):
    """Pass 2 of the snow detection.

    Computes the snow line elevation (zs) from the DEM and the pass1 mask,
    re-applies the NDSI test above zs, fuses with pass1 (pass3), then
    produces the final labelled mask (snow/cloud/no-data) and the complete
    snow_all product.
    """
    # Compute snow fraction in the pass1 image (including nodata pixels)
    snow_fraction = compute_percent(self.pass1_path, 1) / 100
    logging.info("snow fraction in pass1 image:" + str(snow_fraction))

    # Compute Zs elevation fraction and histogram values
    # We compute it in all case as we need to check histogram values to
    # detect cold clouds in optionnal pass4
    snow_line_app = compute_snow_line(self.dem, self.pass1_path,
                                      self.cloud_pass1_path, self.dz,
                                      self.fsnow_lim, self.fclear_lim,
                                      False, -2, -self.dz / 2,
                                      self.histogram_path, self.ram)
    snow_line_app.Execute()

    self.zs = snow_line_app.GetParameterInt("zs")
    logging.info("computed ZS:" + str(self.zs))

    if snow_fraction > self.fsnow_total_lim:
        # Test zs value (-1 means that no zs elevation was found)
        if self.zs != -1:
            # NDSI threshold again
            ndsi_formula = "(im1b" + str(self.nGreen) + "-im1b" + str(self.nSWIR) + \
                           ")/(im1b" + str(self.nGreen) + "+im1b" + str(self.nSWIR) + ")"

            # im1 = bands vrt, im2 = DEM, im3 = refined cloud mask:
            # snow when not cloudy, above zs, NDSI and red above thresholds.
            condition_pass2 = "(im3b1 != 1) and (im2b1>" + str(self.zs) + ")" \
                + " and (" + ndsi_formula + "> " + str(self.ndsi_pass2) + ")" \
                + " and (im1b" + str(self.nRed) + ">" + str(self.rRed_pass2) + ")"

            bandMathPass2 = band_math(
                [self.img, self.dem, self.cloud_refine_path],
                self.pass2_path + GDAL_OPT, condition_pass2 + "?1:0",
                self.ram, otb.ImagePixelType_uint8)
            bandMathPass2.ExecuteAndWriteOutput()
            bandMathPass2 = None

            if self.generate_intermediate_vectors:
                # Generate polygons for pass2 (useful for quality check)
                # TODO
                polygonize(self.pass2_path, self.pass2_path,
                           op.join(self.path_tmp, "pass2_vec.shp"),
                           self.use_gdal_trace_outline,
                           self.gdal_trace_outline_min_area,
                           self.gdal_trace_outline_dp_toler)
            self.pass3()
            generic_snow_path = self.pass3_path
        else:
            # No zs elevation found, take result of pass1 in the output
            # product
            logging.warning("did not find zs, keep pass 1 result.")
            generic_snow_path = self.pass1_path
            # empty image pass2 is needed for computing snow_all
            bandMathEmptyPass2 = band_math([self.pass1_path],
                                           self.pass2_path + GDAL_OPT, "0",
                                           self.ram,
                                           otb.ImagePixelType_uint8)
            bandMathEmptyPass2.ExecuteAndWriteOutput()
    else:
        generic_snow_path = self.pass1_path
        # empty image pass2 is needed for computing snow_all
        # FIXME: A bit overkill to need to BandMath to create an image with
        # 0
        bandMathEmptyPass2 = band_math([self.pass1_path],
                                       self.pass2_path + GDAL_OPT, "0",
                                       self.ram, otb.ImagePixelType_uint8)
        bandMathEmptyPass2.ExecuteAndWriteOutput()

    if self.generate_intermediate_vectors:
        # Generate polygons for pass3 (useful for quality check)
        polygonize(generic_snow_path, generic_snow_path,
                   op.join(self.path_tmp, "pass3_vec.shp"),
                   self.use_gdal_trace_outline,
                   self.gdal_trace_outline_min_area,
                   self.gdal_trace_outline_dp_toler)

    # Final update of the snow mask (include snow/nosnow/cloud)
    ## Strict cloud mask checking
    if self.strict_cloud_mask:
        logging.info("Strict cloud masking of snow pixels.")
        logging.info(
            "Only keep snow pixels which are not in the initial cloud mask in the final mask."
        )
        if self.mode == 'sen2cor':
            logging.info(
                "With sen2cor, strict cloud masking corresponds to the default configuration."
            )
        condition_snow = "(im2b1==1) and (im3b1==0)"
    else:
        condition_snow = "(im2b1==1)"

    # im1 = refined cloud, im2 = snow mask, im3 = back-to-cloud mask:
    # label snow, else cloud, else 0.
    condition_final = condition_snow + "?" + str(self.label_snow) + \
        ":((im1b1==1) or (im3b1==1))?"+str(self.label_cloud)+":0"
    logging.info("Final condition for snow masking: " + condition_final)

    bandMathFinalCloud = band_math(
        [self.cloud_refine_path, generic_snow_path, self.mask_backtocloud],
        self.final_mask_path, condition_final, self.ram,
        otb.ImagePixelType_uint8)
    bandMathFinalCloud.ExecuteAndWriteOutput()
    bandMathFinalCloud = None

    # Apply the no-data mask
    bandMathNoData = band_math(
        [self.final_mask_path, self.nodata_path], self.final_mask_path,
        "im2b1==1?" + str(self.label_no_data) + ":im1b1", self.ram,
        otb.ImagePixelType_uint8)
    bandMathNoData.ExecuteAndWriteOutput()
    bandMathNoData = None

    # Compute the complete snow mask
    app = compute_snow_mask(self.pass1_path, self.pass2_path,
                            self.cloud_pass1_path, self.cloud_refine_path,
                            self.all_cloud_path, self.snow_all_path,
                            self.slope_mask_path, self.ram,
                            otb.ImagePixelType_uint8)
    app.ExecuteAndWriteOutput()
def __init__(self, data):
    """Initialize the snow detector from the *data* configuration dict.

    Expected top-level keys: "general", "vector", "cloud", "inputs",
    "snow". Side effects: extracts and (if needed) resamples the input
    bands, builds the lis.vrt band stack, optionally computes the bad
    slope correction mask, and creates the LIS_PRODUCTS directory.
    """
    # Parse general parameters
    general = data["general"]
    self.path_tmp = str(general.get("pout"))
    self.ram = general.get("ram", 512)
    self.nbThreads = general.get("nb_threads", None)
    logging.info("Actual number of threads: " + str(self.nbThreads))
    self.mode = general.get("mode")
    self.do_preprocessing = general.get("preprocessing", False)
    self.nodata = general.get("nodata", -10000)
    self.multi = general.get("multi", 1)  # Multiplier to handle S2 scaling

    # Resolutions in meter for the snow product
    # (if -1 the target resolution is equal to the max resolution of the input band)
    self.target_resolution = general.get("target_resolution", -1)

    # Parse vector option
    vector_options = data["vector"]
    self.generate_vector = vector_options.get("generate_vector", True)
    self.generate_intermediate_vectors = vector_options.get(
        "generate_intermediate_vectors", False)
    self.use_gdal_trace_outline = vector_options.get(
        "use_gdal_trace_outline", True)
    self.gdal_trace_outline_dp_toler = vector_options.get(
        "gdal_trace_outline_dp_toler", 0)
    self.gdal_trace_outline_min_area = vector_options.get(
        "gdal_trace_outline_min_area", 0)

    # Parse cloud data
    cloud = data["cloud"]
    self.rf = cloud.get("rf")
    # Reflectance thresholds are scaled by the S2 multiplier.
    self.rRed_darkcloud = cloud.get("red_darkcloud")
    self.rRed_darkcloud *= self.multi
    self.rRed_backtocloud = cloud.get("red_backtocloud")
    self.rRed_backtocloud *= self.multi
    self.shadow_in_mask = cloud.get("shadow_in_mask")
    self.shadow_out_mask = cloud.get("shadow_out_mask")
    self.all_cloud_mask = cloud.get("all_cloud_mask")
    self.high_cloud_mask = cloud.get("high_cloud_mask")
    ## Strict cloud mask usage (off by default)
    ## If set to True no pixel from the cloud mask will be marked as snow
    self.strict_cloud_mask = cloud.get("strict_cloud_mask", False)
    ## Suppress snow area surrounded by cloud (off by default)
    self.rm_snow_inside_cloud = cloud.get("rm_snow_inside_cloud", False)
    self.dilation_radius = cloud.get(
        "rm_snow_inside_cloud_dilation_radius", 5)
    self.cloud_threshold = cloud.get("rm_snow_inside_cloud_threshold",
                                     0.85)
    self.cloud_min_area_size = cloud.get("rm_snow_inside_cloud_min_area",
                                         25000)

    # Parse input parameters
    inputs = data["inputs"]
    if self.do_preprocessing:
        self.vrt = str(inputs.get("vrt"))
    # self.img=str(inputs.get("image"))
    self.dem = str(inputs.get("dem"))
    self.cloud_init = str(inputs.get("cloud_mask"))

    ## Get div mask if available
    self.slope_mask_path = None
    if inputs.get("div_mask") and inputs.get("div_slope_thres"):
        self.div_mask = str(inputs.get("div_mask"))
        self.div_slope_thres = inputs.get("div_slope_thres")
        self.slope_mask_path = op.join(self.path_tmp,
                                       "bad_slope_correction_mask.tif")

        # Extract the bad slope correction flag
        bandMathSlopeFlag = band_math(
            [self.div_mask], self.slope_mask_path,
            "im1b1>=" + str(self.div_slope_thres) + "?1:0", self.ram,
            otb.ImagePixelType_uint8)
        bandMathSlopeFlag.ExecuteAndWriteOutput()
        bandMathSlopeFlag = None

    # bands paths
    gb_path_extracted = extract_band(inputs, "green_band", self.path_tmp,
                                     self.nodata)
    rb_path_extracted = extract_band(inputs, "red_band", self.path_tmp,
                                     self.nodata)
    sb_path_extracted = extract_band(inputs, "swir_band", self.path_tmp,
                                     self.nodata)

    # Keep the input product directory basename as product_id
    self.product_id = op.basename(op.dirname(inputs["green_band"]["path"]))

    # check for same res
    gb_dataset = gdal.Open(gb_path_extracted, GA_ReadOnly)
    rb_dataset = gdal.Open(rb_path_extracted, GA_ReadOnly)
    sb_dataset = gdal.Open(sb_path_extracted, GA_ReadOnly)
    # GetGeoTransform()[1] is the pixel width (x resolution).
    gb_resolution = gb_dataset.GetGeoTransform()[1]
    rb_resolution = rb_dataset.GetGeoTransform()[1]
    sb_resolution = sb_dataset.GetGeoTransform()[1]
    logging.info("green band resolution : " + str(gb_resolution))
    logging.info("red band resolution : " + str(rb_resolution))
    logging.info("swir band resolution : " + str(sb_resolution))
    # Close the GDAL datasets.
    gb_dataset = None
    rb_dataset = None
    sb_dataset = None

    # test if different reso
    gb_path_resampled = op.join(self.path_tmp, "green_band_resampled.tif")
    rb_path_resampled = op.join(self.path_tmp, "red_band_resampled.tif")
    sb_path_resampled = op.join(self.path_tmp, "swir_band_resampled.tif")

    # target resolution of the snow product
    # NOTE(review): max_res is computed but not reused below — candidate
    # for cleanup.
    max_res = max(gb_resolution, rb_resolution, sb_resolution)
    if self.target_resolution == -1:
        self.target_resolution = max(gb_resolution, rb_resolution,
                                     sb_resolution)
    else:
        logging.info("Snow product will be at the resolution of " +
                     str(self.target_resolution) + " meters.")

    # Change target resolution
    if rb_resolution != self.target_resolution:
        logging.info("cubic resampling of red band to " +
                     str(self.target_resolution) + " meters.")
        gdal.Warp(rb_path_resampled,
                  rb_path_extracted,
                  resampleAlg=gdal.GRIORA_Cubic,
                  xRes=self.target_resolution,
                  yRes=self.target_resolution)
    else:
        rb_path_resampled = rb_path_extracted

    if gb_resolution != self.target_resolution:
        logging.info("cubic resampling of green band to " +
                     str(self.target_resolution) + " meters.")
        gdal.Warp(gb_path_resampled,
                  gb_path_extracted,
                  resampleAlg=gdal.GRIORA_Cubic,
                  xRes=self.target_resolution,
                  yRes=self.target_resolution)
    else:
        gb_path_resampled = gb_path_extracted

    if sb_resolution != self.target_resolution:
        logging.info("cubic resampling of swir band to " +
                     str(self.target_resolution) + " meters.")
        gdal.Warp(sb_path_resampled,
                  sb_path_extracted,
                  resampleAlg=gdal.GRIORA_Cubic,
                  xRes=self.target_resolution,
                  yRes=self.target_resolution)
    else:
        sb_path_resampled = sb_path_extracted

    # build vrt
    # Band order is SWIR, red, green — matching nSWIR/nRed/nGreen below.
    logging.info("building bands vrt")
    self.img = op.join(self.path_tmp, "lis.vrt")
    gdal.BuildVRT(
        self.img,
        [sb_path_resampled, rb_path_resampled, gb_path_resampled],
        separate=True)

    # Set bands parameters
    self.nGreen = 3
    self.nRed = 2
    self.nSWIR = 1

    # Parse snow parameters
    snow = data["snow"]
    self.dz = snow.get("dz")
    self.swir_pass = snow.get("swir_pass")
    self.ndsi_pass1 = snow.get("ndsi_pass1")
    self.rRed_pass1 = snow.get("red_pass1")
    self.rRed_pass1 *= self.multi
    self.ndsi_pass2 = snow.get("ndsi_pass2")
    self.rRed_pass2 = snow.get("red_pass2")
    self.rRed_pass2 *= self.multi
    self.fsnow_lim = snow.get("fsnow_lim")
    self.fsnow_total_lim = snow.get("fsnow_total_lim")
    self.zs = -1  # default value when zs is not set

    # Define the minimum amount of clear pixels altitude bin
    self.fclear_lim = snow.get("fclear_lim", 0.1)

    # Define label for output snow product
    self.label_no_snow = "0"
    self.label_snow = "100"
    self.label_cloud = "205"
    self.label_no_data = "254"

    # Build useful paths
    self.pass1_path = op.join(self.path_tmp, "pass1.tif")
    self.pass2_path = op.join(self.path_tmp, "pass2.tif")
    self.pass3_path = op.join(self.path_tmp, "pass3.tif")
    self.redBand_path = op.join(self.path_tmp, "red.tif")
    self.all_cloud_path = op.join(self.path_tmp, "all_cloud_mask.tif")
    self.cloud_pass1_path = op.join(self.path_tmp, "cloud_pass1.tif")
    self.cloud_refine_path = op.join(self.path_tmp, "cloud_refine.tif")
    self.nodata_path = op.join(self.path_tmp, "nodata_mask.tif")
    self.mask_backtocloud = op.join(self.path_tmp, "mask_backtocloud.tif")

    # Prepare product directory
    self.product_path = op.join(self.path_tmp, "LIS_PRODUCTS")
    if not op.exists(self.product_path):
        os.makedirs(self.product_path)

    # Build product file paths
    self.snow_all_path = op.join(self.product_path, "LIS_SNOW_ALL.TIF")
    self.final_mask_path = op.join(self.product_path, "LIS_SEB.TIF")
    self.final_mask_vec_path = op.join(self.product_path,
                                       "LIS_SEB_VEC.shp")
    self.composition_path = op.join(self.product_path, "LIS_COMPO.TIF")
    self.histogram_path = op.join(self.product_path, "LIS_HISTO.TXT")
    self.metadata_path = op.join(self.product_path, "LIS_METADATA.XML")
def compare_modis(self):
    """
    Compare the annual map obtained with gap filling approach
    to the Modis annual map.

    Builds a MODIS annual snow map over [date_start, date_stop], crops the
    S2 map, MODIS map and DEM to their common footprint, reprojects the
    DEM onto the MODIS grid and computes annual statistics.
    """
    modis_snowserie = str(self.params.get("modis_snow_map"))
    modis_datefile = self.params.get("modis_snow_map_dates")

    self.modis_annual_snow_map = op.join(self.path_tmp,
                                         "modis_annual_snowmap.tif")

    modis_dates = read_list_from_file(modis_datefile)
    # Find the band indices matching the comparison period.
    # NOTE(review): if date_start/date_stop are missing from the MODIS
    # date file these stay None and the range() below raises — confirm
    # inputs are always consistent.
    modis_start_index = None
    modis_stop_index = None
    for i in range(0, len(modis_dates)):
        tmp_date = str_to_datetime(modis_dates[i], "%Y,%m,%d")
        if tmp_date == self.date_start:
            modis_start_index = i
        if tmp_date == self.date_stop:
            modis_stop_index = i

    # generate the summary map
    # Count per-pixel occurrences of value 200 (one term per band).
    band_index = range(modis_start_index + 1, modis_stop_index + 2)
    expression = "+".join(
        ["(im1b" + str(i) + "==200?1:0)" for i in band_index])

    if not op.exists(self.modis_annual_snow_map):
        bandMathApp = band_math([modis_snowserie],
                                self.modis_annual_snow_map, expression,
                                self.ram, otb.ImagePixelType_uint16)
        bandMathApp.ExecuteAndWriteOutput()
        bandMathApp = None
    shutil.copy2(self.modis_annual_snow_map, self.path_out)

    # Compute intersection of the raster footprint
    intersection, srs = get_raster_intersection(self.annual_snow_map,
                                                self.modis_annual_snow_map)

    # Export intersection as shapefile
    intersection_shapefile = op.join(self.path_tmp, "intersection.shp")
    write_poly_to_shapefile(intersection, intersection_shapefile, srs)

    # Crop to intersection S2 map
    s2_cropped = self.annual_snow_map.replace(".tif", "_cropped.tif")
    gdal.Warp(s2_cropped,
              self.annual_snow_map,
              format='GTiff',
              cutlineDSName=intersection_shapefile,
              cropToCutline=True,
              dstNodata=-1,
              outputType=gdal.GDT_Int16)
    shutil.copy2(s2_cropped, self.path_out)

    # Crop to intersection MODIS map
    modis_cropped = self.modis_annual_snow_map.replace(
        ".tif", "_cropped.tif")
    gdal.Warp(modis_cropped,
              self.modis_annual_snow_map,
              format='GTiff',
              cutlineDSName=intersection_shapefile,
              cropToCutline=True,
              dstNodata=-1,
              outputType=gdal.GDT_Int16)
    shutil.copy2(modis_cropped, self.path_out)

    # Crop to intersection DEM
    dem_cropped = op.join(self.path_tmp, "dem_cropped.tif")
    gdal.Warp(dem_cropped,
              self.dem,
              format='GTiff',
              cutlineDSName=intersection_shapefile,
              cropToCutline=True,
              dstNodata=-1,
              outputType=gdal.GDT_Int16)
    shutil.copy2(dem_cropped, self.path_out)

    # Reproject the DEM onto MODIS footprint
    dem_cropped_reprojected = op.join(self.path_tmp,
                                      "dem_cropped_reprojected.tif")
    super_impose_app = super_impose(modis_cropped, dem_cropped,
                                    dem_cropped_reprojected, "bco", -1,
                                    self.ram, otb.ImagePixelType_int16)
    super_impose_app.ExecuteAndWriteOutput()
    super_impose_app = None
    shutil.copy2(dem_cropped_reprojected, self.path_out)

    compute_annual_stats(s2_cropped, dem_cropped, modis_cropped,
                         dem_cropped_reprojected, self.path_out,
                         "intersection")
def run_evaluation(self):
    """
    Run the evaluation of gap filled timeserie
    The evaluation compare the gap filled date to actual comparison
    snow products
    """
    logging.info("Run snow_annual_map_evaluation")

    # Set maximum ITK threads
    if self.nbThreads:
        os.environ["ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS"] = str(
            self.nbThreads)

    # search matching comparison snow product
    self.product_dict = self.load_products(self.comparison_path_list, None,
                                           None)
    logging.debug("Product dict:")
    logging.debug(self.product_dict)

    # create the comparison products dates file
    comparison_input_dates = list(sorted(self.product_dict.keys()))
    write_list_to_file(self.comparison_dates_filename,
                       comparison_input_dates)

    # load required product
    self.resulting_snow_mask_dict = {}
    for key in self.product_dict.keys():
        comparison_tag = key + "_comparison"
        if len(self.product_dict[key]) > 1:
            # Several products at the same date: merge them first.
            merged_mask = op.join(
                self.path_tmp, comparison_tag + "_merged_snow_product.tif")
            merge_masks_at_same_date(self.product_dict[key], merged_mask,
                                     self.label_snow, self.ram)
            self.resulting_snow_mask_dict[comparison_tag] = merged_mask
        else:
            self.resulting_snow_mask_dict[
                comparison_tag] = self.product_dict[key][0].get_snow_mask(
                )

    # convert the snow masks into binary snow masks
    # (2 = cloud/no-data, 1 = snow, 0 = no snow)
    expression = "im1b1=="+self.label_cloud+"?2:(im1b1=="+self.label_no_data+"?2:" \
        + "(im1b1==" + self.label_snow + ")?1:0)"
    self.binary_snowmask_list = self.convert_mask_list(
        expression, "snow_eval")
    logging.debug("Binary snow mask list:")
    logging.debug(self.binary_snowmask_list)

    # pair the matching products
    ts_dates = read_list_from_file(self.output_dates_filename)
    pair_dict = {}
    for ts_index, ts_date in enumerate(ts_dates):
        for comparison_index, comparison_date in enumerate(
                comparison_input_dates):
            if ts_date in comparison_date:
                pair_dict[comparison_date] = (ts_index, comparison_index)
    # FIX: this was a Python 2 print statement ("print pair_dict"), a
    # SyntaxError under Python 3; log it like the other debug output.
    logging.debug(pair_dict)

    # project the snow masks onto the same foot print
    self.binary_snowmask_list_reprojected = []
    for mask_in in self.binary_snowmask_list:
        mask_out = mask_in.replace(".tif", "_reprojected.tif")
        if not os.path.exists(mask_out):
            super_impose_app = super_impose(self.annual_snow_map, mask_in,
                                            mask_out + GDAL_OPT_2B,
                                            "linear", 2, self.ram,
                                            otb.ImagePixelType_uint8)
            super_impose_app.ExecuteAndWriteOutput()
            super_impose_app = None
        self.binary_snowmask_list_reprojected.append(mask_out)

    # compare the two snow masks
    comparision_list = []
    for comparison_date in pair_dict.keys():
        s2_index, comparison_index = pair_dict[comparison_date]

        # Extract the gap-filled band matching this comparison date.
        path_extracted = op.join(
            self.path_tmp, "gapfilled_s2_" + comparison_date + ".tif")
        gdal.Translate(path_extracted,
                       self.gapfilled_timeserie,
                       format='GTiff',
                       outputType=gdal.GDT_Byte,
                       noData=None,
                       bandList=[s2_index + 1])

        # Encode the pair (comparison, gap-filled) into one value;
        # 254 marks pixels invalid in the comparison mask.
        expression = "im2b1==2?254:(2*im2b1+im1b1)"
        img_out = op.join(self.path_tmp,
                          "comparision_" + comparison_date + ".tif")
        bandMathApp = band_math([
            path_extracted,
            self.binary_snowmask_list_reprojected[comparison_index]
        ], img_out, expression, self.ram, otb.ImagePixelType_uint8)
        bandMathApp.ExecuteAndWriteOutput()
        bandMathApp = None
        comparision_list.append(img_out)

        # add color table
        apply_color_table(img_out, self.colorTable)
        shutil.copy2(img_out, self.path_out)

        out = op.join(self.path_tmp,
                      "confusion_matrix_" + comparison_date + ".csv")
        confusionMatrixApp = confusion_matrix(
            path_extracted,
            self.binary_snowmask_list_reprojected[comparison_index], out,
            2, self.ram)
        confusionMatrixApp.ExecuteAndWriteOutput()
        confusionMatrixApp = None
        shutil.copy2(out, self.path_out)

    # @TODO gather stats
    # Build a montage of all comparison images (ImageMagick).
    montage = op.join(self.path_tmp, "montage_comparison.png")
    command = ["montage"]
    command.extend(["-label", "%t"])
    command.extend(
        ["-title", os.path.basename(self.path_out) + "_comparison"])
    command.extend(["-geometry", "10%x10%+2+2", "-pointsize", "40"])
    command.extend(comparision_list)
    command.extend([montage])

    subprocess.call(command)
    logging.info("Command for comparison figure: " + " ".join(command))

    shutil.copy2(montage, self.path_out)

    #if self.mode == "DEBUG":
    #shutil.copytree(self.path_tmp, op.join(self.path_out, "tmpdir"))

    logging.info("End snow_annual_map_evaluation")
def run(self):
    """Run the snow annual map computation.

    Loads (and optionally densifies) the snow products, merges same-date
    masks, builds binary snow/cloud time series, gap-fills the snow
    series, and writes the cloud occurrence and annual snow maps to
    self.path_out.
    """
    logging.info("Run snow_annual_map")

    # Set maximum ITK threads
    if self.nbThreads:
        os.environ["ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS"] = str(
            self.nbThreads)

    # search matching snow product
    self.product_dict = self.load_products(self.input_path_list,
                                           self.tile_id, None)
    logging.debug("Product dictionnary:")
    logging.debug(self.product_dict)

    # Exiting with error if none of the input products were loaded
    if not self.product_dict:
        logging.error("Empty product list!")
        return

    # Do the loading of the products to densify the timeserie
    if self.use_densification:
        # load densification snow products
        densification_product_dict = self.load_products(
            self.densification_path_list, None, None)
        logging.info("Densification product dict:")
        logging.info(densification_product_dict)

        # Get the footprint of the first snow product
        s2_footprint_ref = self.product_dict[list(
            self.product_dict.keys())[0]][0].get_snow_mask()

        if densification_product_dict:
            # Reproject the densification products on S2 tile before going further
            for densifier_product_key in densification_product_dict.keys():
                for densifier_product in densification_product_dict[
                        densifier_product_key]:
                    original_mask = densifier_product.get_snow_mask()
                    reprojected_mask = op.join(
                        self.path_tmp, densifier_product.product_name +
                        "_reprojected.tif")
                    # Skip the reprojection if it was already produced.
                    if not os.path.exists(reprojected_mask):
                        super_impose_app = super_impose(
                            s2_footprint_ref, original_mask,
                            reprojected_mask, "nn",
                            int(self.label_no_data), self.ram,
                            otb.ImagePixelType_uint8)
                        super_impose_app.ExecuteAndWriteOutput()
                        super_impose_app = None
                    densifier_product.snow_mask = reprojected_mask
                    logging.debug(densifier_product.snow_mask)

                # Add the products to extend the self.product_dict
                if densifier_product_key in self.product_dict.keys():
                    self.product_dict[densifier_product_key].extend(
                        densification_product_dict[densifier_product_key])
                else:
                    self.product_dict[
                        densifier_product_key] = densification_product_dict[
                            densifier_product_key]
        else:
            logging.warning("No Densifying candidate product found!")

    # re-order products according acquisition date
    input_dates = sorted(self.product_dict.keys())
    write_list_to_file(self.input_dates_filename, input_dates)

    # compute or retrive the output dates
    output_dates = []
    if op.exists(self.output_dates_filename):
        output_dates = read_list_from_file(self.output_dates_filename)
    else:
        # Generate one output date per day over [date_start, date_stop].
        tmp_date = self.date_start
        while tmp_date <= self.date_stop:
            output_dates.append(datetime_to_str(tmp_date))
            tmp_date += timedelta(days=1)
        write_list_to_file(self.output_dates_filename, output_dates)

    shutil.copy2(self.input_dates_filename, self.path_out)
    shutil.copy2(self.output_dates_filename, self.path_out)

    # merge products at the same date
    self.resulting_snow_mask_dict = {}
    for key in self.product_dict.keys():
        if len(self.product_dict[key]) > 1:
            merged_mask = op.join(self.path_tmp,
                                  key + "_merged_snow_product.tif")
            merge_masks_at_same_date(self.product_dict[key], merged_mask,
                                     self.label_snow, self.ram)
            self.resulting_snow_mask_dict[key] = merged_mask
        else:
            self.resulting_snow_mask_dict[key] = self.product_dict[key][
                0].get_snow_mask()

    # convert the snow masks into binary snow masks
    expression = "(im1b1==" + self.label_snow + ")?1:0"
    self.binary_snowmask_list = self.convert_mask_list(
        expression, "snow", GDAL_OPT)
    logging.debug("Binary snow mask list:")
    logging.debug(self.binary_snowmask_list)

    # convert the snow masks into binary cloud masks
    # (no-data pixels are counted as cloud)
    expression = "im1b1==" + self.label_cloud + "?1:(im1b1==" + self.label_no_data + "?1:0)"
    self.binary_cloudmask_list = self.convert_mask_list(
        expression, "cloud", GDAL_OPT)
    logging.debug("Binary cloud mask list:")
    logging.debug(self.binary_cloudmask_list)

    # build cloud mask vrt
    logging.info("Building multitemp cloud mask vrt")
    logging.info("cloud vrt: " + self.multitemp_cloud_vrt)
    gdal.BuildVRT(self.multitemp_cloud_vrt,
                  self.binary_cloudmask_list,
                  separate=True)

    # generate the summary map
    # Sum the binary cloud bands = per-pixel cloud occurrence count.
    band_index = range(1, len(self.binary_cloudmask_list) + 1)
    expression = "+".join(["im1b" + str(i) for i in band_index])

    bandMathApp = band_math([self.multitemp_cloud_vrt],
                            self.cloud_occurence_img, expression, self.ram,
                            otb.ImagePixelType_uint16)
    bandMathApp.ExecuteAndWriteOutput()
    bandMathApp = None

    logging.info("Copying outputs from tmp to output folder")
    shutil.copy2(self.cloud_occurence_img, self.path_out)

    # build snow mask vrt
    logging.info("Building multitemp snow mask vrt")
    logging.info("snow vrt: " + self.multitemp_snow_vrt)
    gdal.BuildVRT(self.multitemp_snow_vrt,
                  self.binary_snowmask_list,
                  separate=True)

    # gap filling the snow timeserie
    app_gap_filling = gap_filling(self.multitemp_snow_vrt,
                                  self.multitemp_cloud_vrt,
                                  self.gapfilled_timeserie + GDAL_OPT,
                                  self.input_dates_filename,
                                  self.output_dates_filename, self.ram,
                                  otb.ImagePixelType_uint8)

    # @TODO the mode is for now forced to DEBUG in order to generate img on disk
    #img_in = get_app_output(app_gap_filling, "out", self.mode)
    #if self.mode == "DEBUG":
    #shutil.copy2(self.gapfilled_timeserie, self.path_out)
    #app_gap_filling = None
    img_in = get_app_output(app_gap_filling, "out", "DEBUG")
    shutil.copy2(self.gapfilled_timeserie, self.path_out)
    app_gap_filling = None

    # generate the annual map
    # Sum the gap-filled daily snow bands = per-pixel snow-day count.
    band_index = range(1, len(output_dates) + 1)
    expression = "+".join(["im1b" + str(i) for i in band_index])

    bandMathApp = band_math([img_in], self.annual_snow_map, expression,
                            self.ram, otb.ImagePixelType_uint16)
    bandMathApp.ExecuteAndWriteOutput()
    bandMathApp = None

    logging.info("Copying outputs from tmp to output folder")
    shutil.copy2(self.annual_snow_map, self.path_out)

    logging.info("End of snow_annual_map")

    if self.mode == "DEBUG":
        dest_debug_dir = op.join(self.path_out, "tmpdir")
        if op.exists(dest_debug_dir):
            shutil.rmtree(dest_debug_dir)
        shutil.copytree(self.path_tmp, dest_debug_dir)