def dem_list_to_slope_list(dem_list, save_dir, extent_id, process_num=1):

    slope_list = []
    slope_tif_dir = os.path.join(save_dir, 'slope_sub_%d' % extent_id)
    if os.path.isdir(slope_tif_dir) is False:
        io_function.mkdir(slope_tif_dir)

    if process_num == 1:
        for idx, tif in enumerate(dem_list):
            slope_tif = one_dem_to_slope(tif, slope_tif_dir)
            if slope_tif is not False:
                slope_list.append(slope_tif)
    elif process_num > 1:

        # switch back to multi-process gdalwarp: when creating mosaics, gdalwarp's multi-threading cannot fully utilize the CPUs
        threadPool = Pool(process_num)  # multiple processes

        parameters_list = [(tif, slope_tif_dir)
                           for idx, tif in enumerate(dem_list)]
        results = threadPool.starmap(one_dem_to_slope,
                                     parameters_list)  # need python3
        slope_list = [out for out in results if out is not False]
        threadPool.close()
    else:
        raise ValueError('Wrong process number: %s' % str(process_num))

    return slope_list
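This single-process/multi-process fan-out recurs in several functions below. A minimal sketch of the same pattern wrapped in a context manager, so the pool is always cleaned up even when a worker raises (the run_in_pool name is made up here):

from multiprocessing import Pool

def run_in_pool(func, arg_tuples, process_num):
    # fan out with starmap and drop failed results, as the functions in this
    # listing do; the with-block guarantees the pool is cleaned up on errors
    with Pool(process_num) as pool:
        results = pool.starmap(func, arg_tuples)
    return [out for out in results if out is not False]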
def main():
    basic.setlogfile('log_to_relative_dem_8bit.txt')

    if os.path.isdir(relative_dem_dir) is False:
        io_function.mkdir(relative_dem_dir)

    # 500 pixels by 500 pixels, that is, 1 km by 1 km
    patch_width = 500
    patch_height = 500
    process_num = 1

    failed_tifs = []

    dem_reg_list = io_function.get_file_list_by_pattern(
        arcticDEM_reg_tif_dir, '*dem_reg.tif')
    count = len(dem_reg_list)
    for idx, tif in enumerate(dem_reg_list):
        print('%d/%d convert %s to relative DEM (8bit)' %
              (idx + 1, count, tif))
        rel_dem_8bit = io_function.get_name_by_adding_tail(tif, 'relDEM8bit')
        rel_dem_8bit = os.path.join(relative_dem_dir,
                                    os.path.basename(rel_dem_8bit))
        try:
            dem_to_relative_dem(tif, rel_dem_8bit, patch_width, patch_height,
                                process_num)
        except Exception:
            failed_tifs.append(tif)

    with open('to_relative_dem_failed_cases.txt', 'w') as f_obj:
        for item in failed_tifs:
            f_obj.write(item + '\n')
def crop_to_same_exent_for_diff(dem_tif_list, save_dir, extent_id, extent_poly,
                                process_num):
    # crop to the same extent
    crop_tif_dir = os.path.join(save_dir,
                                'dem_crop_for_diff_sub_%d' % extent_id)
    if os.path.isdir(crop_tif_dir) is False:
        io_function.mkdir(crop_tif_dir)
    crop_tif_list = []
    for tif in dem_tif_list:
        save_crop_path = os.path.join(
            crop_tif_dir,
            os.path.basename(
                io_function.get_name_by_adding_tail(tif, 'sub_poly_%d' %
                                                    extent_id)))
        if os.path.isfile(save_crop_path):
            basic.outputlogMessage('%s exists, skip cropping' % save_crop_path)
            crop_tif_list.append(save_crop_path)
        else:
            crop_tif = subset_image_by_polygon_box(tif,
                                                   save_crop_path,
                                                   extent_poly,
                                                   resample_m='near',
                                                   same_extent=True,
                                                   thread_num=process_num)
            if crop_tif is False:
                # raise ValueError('warning, crop %s failed' % tif)
                continue
            crop_tif_list.append(crop_tif)
    dem_tif_list = crop_tif_list

    return dem_tif_list
def split_an_image(para_file, image_path, save_dir, patch_w, patch_h,
                   overlay_x, overlay_y):

    split_format = parameters.get_string_parameters(para_file,
                                                    'split_image_format')
    out_format = 'PNG'  # default is PNG
    if split_format == '.tif': out_format = 'GTIFF'
    if split_format == '.jpg': out_format = 'JPEG'
    if os.path.isdir(save_dir) is False:
        io_function.mkdir(save_dir)

    split_image.split_image(image_path,
                            save_dir,
                            patch_w,
                            patch_h,
                            overlay_x,
                            overlay_y,
                            out_format,
                            pre_name=None,
                            process_num=8)
    # get list
    patch_list = io_function.get_file_list_by_ext(split_format,
                                                  save_dir,
                                                  bsub_folder=False)
    if len(patch_list) < 1:
        print('Warning, no images in %s' % save_dir)
        return None
    list_txt_path = save_dir + '_list.txt'
    io_function.save_list_to_txt(list_txt_path, patch_list)
    return list_txt_path
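For reference, a rough sketch of how a sliding window over an image could be computed; this is not the project's split_image implementation (which, judging by comments further below, adjusts patch sizes near the edges), just the basic idea:

def sliding_window_sketch(width, height, patch_w, patch_h, overlay_x, overlay_y):
    # return (xoff, yoff, xsize, ysize) patches covering a width x height image;
    # the step between patches is the patch size minus the requested overlap
    step_x = max(1, patch_w - overlay_x)
    step_y = max(1, patch_h - overlay_y)
    patches = []
    for yoff in range(0, height, step_y):
        for xoff in range(0, width, step_x):
            patches.append((xoff, yoff,
                            min(patch_w, width - xoff),
                            min(patch_h, height - yoff)))
    return patches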
def mosaic_dem_same_stripID(demTif_groups,save_tif_dir, resample_method, process_num=1, save_source=False, o_format='GTiff'):
    if os.path.isdir(save_tif_dir) is False:
        io_function.mkdir(save_tif_dir)

    # when run in parallel, it gets "Finalize object, dead" after a while; cannot figure out why, so temporarily set process_num = 1
    # it could be related to writing the output logfile to disk.
    # on tesia, it's fine, but on uist, the issue occurs within just a few minutes.
    # could be useful: Why your multiprocessing Pool is stuck: https://pythonspeed.com/articles/python-multiprocessing/
    # (see also the spawn-context sketch after this function)

    # update on 15 March, 2021. I changed the python on uist from 3.8 to 3.7 (same as tesia), then the problem was solved,
    # but sometimes the program still crashes without a specific reason (gets killed)

    # process_num = 1


    mosaic_list = []
    if process_num == 1:
        for key in demTif_groups.keys():
            save_mosaic = mosaic_dem_list(key, demTif_groups[key], save_tif_dir,resample_method,save_source, o_format,thread_num=process_num)
            mosaic_list.append(save_mosaic)
    elif process_num > 1:
        # switch back to multi-process gdalwarp: when creating mosaics, gdalwarp's multi-threading cannot fully utilize the CPUs
        threadPool = Pool(process_num)  # multiple processes

        parameters_list = [(key, demTif_groups[key], save_tif_dir, resample_method, save_source, o_format,1) for key in demTif_groups.keys()]

        results = threadPool.starmap(mosaic_dem_list, parameters_list)  # need python3
        mosaic_list = [ out for out in results if out is not False]
        threadPool.close()
    else:
        raise ValueError('Wrong process_num: %d'%process_num)

    return mosaic_list
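The "Finalize object, dead" hang described in the comments above is a known failure mode of fork-based pools inheriting state such as open log-file handles. One common mitigation, sketched here under the assumption that it applies to this case (the helper name is hypothetical), is to use a 'spawn' context so workers start as fresh interpreter processes:

import multiprocessing

def starmap_spawn(func, arg_tuples, process_num):
    # 'spawn' starts clean worker processes instead of forking, so they do not
    # inherit locks or open file handles that can deadlock the pool
    ctx = multiprocessing.get_context('spawn')
    with ctx.Pool(process_num) as pool:
        return pool.starmap(func, arg_tuples)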
def segment_subsidence_on_dem_diff(dem_diff_tif, save_dir):

    out_pre = os.path.splitext(os.path.basename(dem_diff_tif))[0]

    # read images
    one_band_img, nodata = raster_io.read_raster_one_band_np(dem_diff_tif)

    # segmentation by threshold (may produce too much noise)
    # mean = np.nanmean(one_band_img)
    # print("mean value is: %.4f"%mean)
    # one_band_img = one_band_img - mean    # cannot use the mean, which may be affected by some outliers
    out_labels = np.zeros_like(one_band_img,dtype=np.uint8)
    out_labels[ one_band_img < -2 ] = 1     # a threshold of -1 results in a lot of noise polygons, so changed it to -2

    # apply median filter
    out_labels = cv2.medianBlur(out_labels, 3)  # with kernel=3

    # save the label
    if os.path.isdir(save_dir) is False:
        io_function.mkdir(save_dir)
    label_path = os.path.join(save_dir, out_pre + '_label.tif')
    raster_io.save_numpy_array_to_rasterfile(out_labels, label_path, dem_diff_tif, nodata=0)

    # convert the label to shapefile
    out_shp = os.path.join(save_dir, out_pre + '.shp')
    command_string = 'gdal_polygonize.py -8 %s -b 1 -f "ESRI Shapefile" %s' % (label_path, out_shp)
    res = os.system(command_string)
    if res != 0:
        sys.exit(1)

    # post-processing
    post_processing_subsidence(out_shp)
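A self-contained illustration of the threshold-then-median-filter step above, on a synthetic DEM-difference patch; the 3x3 median removes isolated noise pixels while keeping the contiguous subsidence block:

import numpy as np
import cv2

dem_diff = np.zeros((100, 100), dtype=np.float32)
dem_diff[40:60, 40:60] = -5.0               # a contiguous subsidence area
dem_diff[10, 10] = dem_diff[80, 20] = -5.0  # isolated noise pixels

labels = np.zeros_like(dem_diff, dtype=np.uint8)
labels[dem_diff < -2] = 1
labels = cv2.medianBlur(labels, 3)

print(labels[10, 10], labels[50, 50])  # prints: 0 1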
def run_extract_headwall_jobs(max_job_count, n_tif_per_jobs):

    from dem_common import dem_headwall_shp_dir, dem_slope_dir

    if os.path.isdir(dem_headwall_shp_dir) is False:
        io_function.mkdir(dem_headwall_shp_dir)

    # get slope file list
    slope_tifs = io_function.get_file_list_by_ext('.tif',
                                                  dem_slope_dir,
                                                  bsub_folder=False)
    print('Found %d tifs in %s' % (len(slope_tifs), dem_slope_dir))

    # divide grid_ids to many groups
    slope_tif_count = len(slope_tifs)
    slope_tif_groups = [
        slope_tifs[i:i + n_tif_per_jobs]
        for i in range(0, slope_tif_count, n_tif_per_jobs)
    ]

    for idx, slope_tifs_group in enumerate(slope_tif_groups):

        print(
            datetime.now(),
            'processing group %d for extracting headwall, total %d groups' %
            (idx, len(slope_tif_groups)))
        submit_extract_headwall_job(slope_tifs_group, idx, max_job_count)
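The grouping above uses the standard slicing idiom for chunking a list; isolated, it looks like this:

def chunk_list(items, n_per_group):
    # consecutive groups of at most n_per_group items
    return [items[i:i + n_per_group] for i in range(0, len(items), n_per_group)]

print(chunk_list(list(range(7)), 3))  # [[0, 1, 2], [3, 4, 5], [6]]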
def main(options, args):
    input_path = args[0]

    if input_path.endswith('.txt'):
        slope_tifs = io_function.read_list_from_txt(input_path)
    elif os.path.isdir(input_path):
        slope_tifs = io_function.get_file_list_by_ext('.tif',input_path, bsub_folder=True)
    else:
        slope_tifs = [input_path]
    process_num = options.process_num

    working_dir = './'
    save_dir = dem_headwall_shp_dir
    if os.path.isdir(working_dir) is False:
        io_function.mkdir(working_dir)
    if os.path.isdir(save_dir) is False:
        io_function.mkdir(save_dir)

    failed_tifs = []

    min_slope = options.min_slope
    min_size = options.min_area
    max_size = options.max_area
    max_axis_width = options.max_axis_width
    max_box_WH = options.max_box_WH
    for idx, slope in enumerate(slope_tifs):
        if extract_headwall_from_slope(idx, len(slope_tifs), slope,working_dir,save_dir, min_slope,min_size,max_size,max_axis_width,max_box_WH,process_num) is False:
            failed_tifs.append(slope)

    io_function.save_list_to_txt('extract_headwall_failed_tifs.txt',failed_tifs)
def main():
    # run in ~/Data/Arctic/canada_arctic/autoMapping/multiArea_sub_images on tesia
    ini_list = io_function.get_file_list_by_pattern('./','area*.ini')
    txt_list = io_function.get_file_list_by_pattern('./','area*.txt')
    for txt in txt_list:
        ini_s = io_function.read_list_from_txt(txt)
        ini_list.extend(ini_s)

    ini_list = [os.path.abspath(item) for item in ini_list]
    file_names = [ io_function.get_name_no_ext(item) for item in ini_list ]

    cur_dir = os.getcwd()

    # show
    for item in ini_list:
        print(item)
    time.sleep(3)

    for name, area_ini in zip(file_names,ini_list):
        work_dir = os.path.join(cur_dir,name)
        io_function.mkdir(work_dir)
        os.chdir(work_dir)
        # copy and modify main_para.ini
        io_function.copyfiletodir(os.path.join(cur_dir,'main_para.ini'),'./',overwrite=True)
        io_function.copyfiletodir(os.path.join(cur_dir,'exe.sh'),'./',overwrite=True)

        parameters.write_Parameters_file('main_para.ini','training_regions',area_ini)

        # run exe.sh
        res = os.system('./exe.sh')
        if res !=0:
            print(res)
            sys.exit(1)

        os.chdir(cur_dir)
def main(options, args):

    t_polygons_shp = args[0]
    image_folder = args[1]   # folder storing image tiles (many split blocks of a big image)

    b_label_image = options.no_label_image
    process_num = options.process_num

    # check training polygons
    assert io_function.is_file_exist(t_polygons_shp)
    t_polygons_shp_all = options.all_training_polygons
    if t_polygons_shp_all is None:
        basic.outputlogMessage('Warning, the full set of training polygons is not assigned; '
                               'the one in the input argument will be treated as the full set of training polygons')
        t_polygons_shp_all = t_polygons_shp
    else:
        if get_projection_proj4(t_polygons_shp) != get_projection_proj4(t_polygons_shp_all):
            raise ValueError('error, projection inconsistency between %s and %s'%(t_polygons_shp, t_polygons_shp_all))
    assert io_function.is_file_exist(t_polygons_shp_all)

    # get image tile list
    # image_tile_list = io_function.get_file_list_by_ext(options.image_ext, image_folder, bsub_folder=False)
    image_tile_list = io_function.get_file_list_by_pattern(image_folder,options.image_ext)
    if len(image_tile_list) < 1:
        raise IOError('error, failed to get image tiles in folder %s'%image_folder)

    check_projection_rasters(image_tile_list)   # it will raise errors if found problems

    # comment out on June 18, 2021,
    # check_1or3band_8bit(image_tile_list)  # it will raise errors if found problems

    # need to check: the shape file and raster should have the same projection.
    if get_projection_proj4(t_polygons_shp) != get_projection_proj4(image_tile_list[0]):
        raise ValueError('error, the input raster (e.g., %s) and vector (%s) files don\'t have the same projection'%(image_tile_list[0],t_polygons_shp))

    # check these are EPSG:4326 projection
    if get_projection_proj4(t_polygons_shp).strip() == '+proj=longlat +datum=WGS84 +no_defs':
        bufferSize = meters_to_degress_onEarth(options.bufferSize)
    else:
        bufferSize = options.bufferSize

    saved_dir = options.out_dir
    # if os.system('mkdir -p ' + os.path.join(saved_dir,'subImages')) != 0:
    #     sys.exit(1)
    # if os.system('mkdir -p ' + os.path.join(saved_dir,'subLabels')) !=0:
    #     sys.exit(1)
    io_function.mkdir(os.path.join(saved_dir,'subImages'))
    if b_label_image:
        io_function.mkdir(os.path.join(saved_dir,'subLabels'))

    dstnodata = options.dstnodata
    if 'qtb_sentinel2' in image_tile_list[0]:
        # for qtb_sentinel-2 mosaic
        pre_name = '_'.join(os.path.splitext(os.path.basename(image_tile_list[0]))[0].split('_')[:4])
    else:
        pre_name = os.path.splitext(os.path.basename(image_tile_list[0]))[0]
    get_sub_images_and_labels(t_polygons_shp, t_polygons_shp_all, bufferSize, image_tile_list,
                              saved_dir, pre_name, dstnodata, brectangle=options.rectangle, b_label=b_label_image,
                              proc_num=process_num)
def predict_one_image_mmseg(para_file, image_path, img_save_dir, inf_list_file,
                            gpuid, trained_model):
    """ run prediction of one image
    """
    expr_name = parameters.get_string_parameters(para_file, 'expr_name')
    network_ini = parameters.get_string_parameters(para_file,
                                                   'network_setting_ini')
    base_config_file = parameters.get_string_parameters(
        network_ini, 'base_config')
    config_file = osp.basename(
        io_function.get_name_by_adding_tail(base_config_file, expr_name))

    inf_batch_size = parameters.get_digit_parameters(network_ini,
                                                     'inf_batch_size', 'int')

    patch_width = parameters.get_digit_parameters(para_file, 'inf_patch_width',
                                                  'int')
    patch_height = parameters.get_digit_parameters(para_file,
                                                   'inf_patch_height', 'int')
    adj_overlay_x = parameters.get_digit_parameters(para_file,
                                                    'inf_pixel_overlay_x',
                                                    'int')
    adj_overlay_y = parameters.get_digit_parameters(para_file,
                                                    'inf_pixel_overlay_y',
                                                    'int')

    done_indicator = '%s_done' % inf_list_file
    if os.path.isfile(done_indicator):
        basic.outputlogMessage('warning, %s exists, skip prediction' %
                               done_indicator)
        return
    if os.path.isdir(img_save_dir) is False:
        io_function.mkdir(img_save_dir)
    # use a specific GPU for prediction; only run inference on one image
    time0 = time.time()
    if gpuid is None:
        gpuid = 0

    predict_rsImage_mmseg(config_file,
                          trained_model,
                          image_path,
                          img_save_dir,
                          batch_size=inf_batch_size,
                          gpuid=gpuid,
                          tile_width=patch_width,
                          tile_height=patch_height,
                          overlay_x=adj_overlay_x,
                          overlay_y=adj_overlay_y)

    duration = time.time() - time0
    os.system(
        'echo "$(date): time cost of inference for image in %s: %.2f seconds">>"time_cost.txt"'
        % (inf_list_file, duration))
    # write a file to indicate that the prediction has been done.
    os.system('echo %s > %s_done' % (inf_list_file, inf_list_file))

    return
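The two os.system echo calls above shell out just to append a line and to create the done indicator; a sketch of the same bookkeeping in pure Python (the mark_done helper is made up here):

from datetime import datetime
from pathlib import Path

def mark_done(inf_list_file, duration):
    # append the time cost, then write the done indicator the caller checks for
    with open('time_cost.txt', 'a') as f:
        f.write('%s: time cost of inference for image in %s: %.2f seconds\n'
                % (datetime.now(), inf_list_file, duration))
    Path('%s_done' % inf_list_file).write_text(inf_list_file + '\n')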
def one_dem_diff_to_8bit(demDiff_tif):
    if os.path.isdir(grid_dem_diffs_8bit_dir) is False:
        io_function.mkdir(grid_dem_diffs_8bit_dir)
    tif_8bit = io_function.get_name_by_adding_tail(demDiff_tif, '8bit')
    output = os.path.join(grid_dem_diffs_8bit_dir, os.path.basename(tif_8bit))
    if dem_tif_to_8bit(demDiff_tif, output) is False:
        basic.outputlogMessage('failed to generate 8bit grey image from DEM difference')
        return False
    return True
def move_files(save_dir, out_fig, out_hist_info):
    if os.path.isdir(save_dir) is False:
        io_function.mkdir(save_dir)
    trim_fig = io_function.get_name_by_adding_tail(out_fig, 'trim')
    os.system('convert -trim %s %s' % (out_fig, trim_fig))
    io_function.movefiletodir(trim_fig, save_dir, overwrite=True)
    io_function.delete_file_or_dir(out_fig)
    # io_function.movefiletodir(out_fig,save_dir,overwrite=True)
    io_function.movefiletodir(out_hist_info, save_dir, overwrite=True)
def check_dem_valid_per(dem_tif_list, work_dir, process_num =1, move_dem_threshold = None, area_pixel_num=None):
    '''
    get the valid pixel percentage for each DEM
    :param dem_tif_list:
    :param work_dir:
    :param move_dem_threshold: move a DEM to a sub-folder if its valid percentage is smaller than the threshold
    :return:
    '''

    keep_dem_list = []
    print('start getting valid pixel percent for %d files'%len(dem_tif_list))
    dem_tif_valid_per = {}
    # when run in parallel, it gets "Finalize object, dead" after a while; cannot figure out why, so temporarily set process_num = 1
    # process_num = 1       # update on 15 March, 2021. I changed the python on uist from 3.8 to 3.7 (same as tesia), then the problem was solved.

    if process_num == 1:
        for idx,tif in enumerate(dem_tif_list):
            # RSImage.get_valid_pixel_count(tif)
            # per = RSImage.get_valid_pixel_percentage(tif,total_pixel_num=area_pixel_num)
            print('(%d/%d) get valid pixel percent for %s'%(idx+1, len(dem_tif_list),tif))
            per = raster_io.get_valid_pixel_percentage(tif, total_pixel_num=area_pixel_num)
            if per is False:
                return False
            dem_tif_valid_per[tif] = per
            keep_dem_list.append(tif)
    elif process_num > 1:
        threadPool = Pool(process_num)  # multiple processes
        parameters_list = [(tif, area_pixel_num, '%d/%d'%(idx+1, len(dem_tif_list)) ) for idx,tif in enumerate(dem_tif_list)]
        results = threadPool.starmap(raster_io.get_valid_pixel_percentage, parameters_list)  # need python3
        for res, tif in zip(results, dem_tif_list):
            if res is False:
                return False
            dem_tif_valid_per[tif] = res
            keep_dem_list.append(tif)
    else:
        raise ValueError("Wrong process_num: %d"%process_num)
    # sort
    dem_tif_valid_per_d = dict(sorted(dem_tif_valid_per.items(), key=operator.itemgetter(1), reverse=True))
    percent_txt = os.path.join(work_dir,'dem_valid_percent.txt')
    with open(percent_txt,'w') as f_obj:
        for key in dem_tif_valid_per_d:
            f_obj.write('%s %.4f\n'%(os.path.basename(key),dem_tif_valid_per_d[key]))
        basic.outputlogMessage('save dem valid pixel percentage to %s'%percent_txt)

    # only keep dem with valid pixel greater than a threshold
    if move_dem_threshold is not None:  # int or float
        keep_dem_list = []      # reset the list
        mosaic_dir_rm = os.path.join(work_dir,'dem_valid_lt_%.2f'%move_dem_threshold)
        io_function.mkdir(mosaic_dir_rm)
        for tif in dem_tif_valid_per.keys():
            if dem_tif_valid_per[tif] < move_dem_threshold:
                io_function.movefiletodir(tif,mosaic_dir_rm)
            else:
                keep_dem_list.append(tif)

    return keep_dem_list
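raster_io.get_valid_pixel_percentage is project-specific; assuming the band has already been read into a numpy array along with its nodata value, the percentage computation might look roughly like this:

import numpy as np

def valid_pixel_percentage(band, nodata, total_pixel_num=None):
    # percentage of pixels that are neither NaN nor the nodata value
    valid = np.isfinite(band)
    if nodata is not None:
        valid &= (band != nodata)
    total = total_pixel_num if total_pixel_num is not None else band.size
    return 100.0 * np.count_nonzero(valid) / total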
def main(options, args):
    extent_shp_or_ids_txt = args[0]
    process_num = options.process_num
    o_res = options.out_res

    if os.path.isdir(grid_matchtag_sum_dir) is False:
        io_function.mkdir(grid_matchtag_sum_dir)

    basic.setlogfile('produce_matchtag_sum_ArcticDEM_log_%s.txt' %
                     timeTools.get_now_time_str())

    # read grids and ids
    time0 = time.time()
    all_grid_polys, all_ids = vector_gpd.read_polygons_attributes_list(
        grid_20_shp, 'id')
    print('time cost of reading polygons and attributes', time.time() - time0)

    # get grid ids based on input extent
    grid_base_name = os.path.splitext(
        os.path.basename(extent_shp_or_ids_txt))[0]
    grid_polys, grid_ids = get_grid_20(extent_shp_or_ids_txt, all_grid_polys,
                                       all_ids)

    # check dem difference existence
    grid_dem_tifs, grid_ids_no_sum = get_existing_matchtag_sum(
        grid_matchtag_sum_dir, grid_base_name, grid_ids)
    if len(grid_ids_no_sum) > 0:
        # refine grid_polys
        if len(grid_ids) > len(grid_ids_no_sum):
            id_index = [grid_ids.index(id) for id in grid_ids_no_sum]
            grid_polys = [grid_polys[idx] for idx in id_index]

        # # download ArcticDEM and applying registration
        # tarballs, reg_tifs = download_dem_tarball(dem_strip_shp, grid_polys, arcticDEM_tarball_dir, grid_base_name,
        #                                         reg_tif_dir=arcticDEM_reg_tif_dir, poly_ids=grid_ids_no_demDiff)
        #
        # # unpack and applying registration
        # if len(tarballs) > 0:
        #     basic.outputlogMessage('Process %d dem tarballs'%len(tarballs))
        #     out_reg_tifs = process_dem_tarball(tarballs,'./',arcticDEM_reg_tif_dir,remove_inter_data=True, apply_registration=True)
        #     basic.outputlogMessage('Get %d new registration dem tifs' % len(out_reg_tifs))
        #     reg_tifs.extend(out_reg_tifs)

        reg_tifs = io_function.get_file_list_by_ext('.tif',
                                                    arcticDEM_reg_tif_dir,
                                                    bsub_folder=False)
        matchtag_tifs = [tif for tif in reg_tifs
                         if 'matchtag' in tif]  # only keep matchtag
        # crop, sum
        out_dem_diffs = produce_matchtag_sum_grids(grid_polys,
                                                   grid_ids_no_sum,
                                                   grid_base_name,
                                                   matchtag_tifs,
                                                   o_res,
                                                   process_num=process_num)
def get_sub_images_pixel_json_files(polygons_shp, image_folder_or_path,
                                    image_pattern, class_names, bufferSize,
                                    dstnodata, saved_dir, b_rectangle,
                                    process_num):

    # check training polygons
    assert io_function.is_file_exist(polygons_shp)

    # get image tile list
    # image_tile_list = io_function.get_file_list_by_ext(options.image_ext, image_folder, bsub_folder=False)
    if os.path.isdir(image_folder_or_path):
        image_tile_list = io_function.get_file_list_by_pattern(
            image_folder_or_path, image_pattern)
    else:
        assert io_function.is_file_exist(image_folder_or_path)
        image_tile_list = [image_folder_or_path]

    if len(image_tile_list) < 1:
        raise IOError('error, failed to get image tiles in folder %s' %
                      image_folder_or_path)

    get_subImages.check_projection_rasters(
        image_tile_list)  # it will raise errors if found problems

    get_subImages.check_1or3band_8bit(
        image_tile_list)  # it will raise errors if found problems

    # need to check: the shape file and raster should have the same projection.
    if get_subImages.get_projection_proj4(
            polygons_shp) != get_subImages.get_projection_proj4(
                image_tile_list[0]):
        raise ValueError(
            'error, the input raster (e.g., %s) and vector (%s) files don\'t have the same projection'
            % (image_tile_list[0], polygons_shp))

    # check these are EPSG:4326 projection
    if get_subImages.get_projection_proj4(
            polygons_shp).strip() == '+proj=longlat +datum=WGS84 +no_defs':
        bufferSize = get_subImages.meters_to_degress_onEarth(bufferSize)

    pre_name = os.path.splitext(os.path.basename(image_tile_list[0]))[0]

    saved_dir = os.path.join(saved_dir, pre_name + '_subImages')
    if os.path.isdir(saved_dir) is False:
        io_function.mkdir(saved_dir)

    get_sub_images_and_json_files(polygons_shp,
                                  class_names,
                                  bufferSize,
                                  image_tile_list,
                                  saved_dir,
                                  pre_name,
                                  dstnodata,
                                  brectangle=b_rectangle,
                                  proc_num=process_num)
def copy_subImages_labels_directly(subImage_dir, subLabel_dir, area_ini):

    input_image_dir = parameters.get_directory_None_if_absence(
        area_ini, 'input_image_dir')
    # it is ok to treat a file name as a pattern and pass it to the following functions to get the file list
    input_image_or_pattern = parameters.get_string_parameters(
        area_ini, 'input_image_or_pattern')

    # label raster folder
    label_raster_dir = parameters.get_directory_None_if_absence(
        area_ini, 'label_raster_dir')
    sub_images_list = []
    label_path_list = []

    if os.path.isdir(subImage_dir) is False:
        io_function.mkdir(subImage_dir)
    if os.path.isdir(subLabel_dir) is False:
        io_function.mkdir(subLabel_dir)

    sub_images = io_function.get_file_list_by_pattern(input_image_dir,
                                                      input_image_or_pattern)
    for sub_img in sub_images:
        # find the corresponding label raster
        label_name = io_function.get_name_by_adding_tail(
            os.path.basename(sub_img), 'label')
        label_path = os.path.join(label_raster_dir, label_name)
        if os.path.isfile(label_path):
            sub_images_list.append(sub_img)
            label_path_list.append(label_path)
        else:
            print('Warning, cannot find label for %s in %s' %
                  (sub_img, label_raster_dir))

    # copy sub-images, adding to txt files
    with open('sub_images_labels_list.txt', 'a') as f_obj:
        for tif_path, label_file in zip(sub_images_list, label_path_list):
            if label_file is None:
                continue
            dst_subImg = os.path.join(subImage_dir, os.path.basename(tif_path))

            # copy sub-images
            io_function.copy_file_to_dst(tif_path, dst_subImg, overwrite=True)

            dst_label_file = os.path.join(subLabel_dir,
                                          os.path.basename(label_file))
            io_function.copy_file_to_dst(label_file,
                                         dst_label_file,
                                         overwrite=True)

            sub_image_label_str = dst_subImg + ":" + dst_label_file + '\n'
            f_obj.write(sub_image_label_str)
def main(options, args):
    extent_shp_or_ids_txt = args[0]
    process_num = options.process_num
    keep_dem_percent = options.keep_dem_percent
    o_res = options.out_res

    basic.setlogfile('produce_headwall_shp_ArcticDEM_log_%s.txt' %
                     timeTools.get_now_time_str())

    if os.path.isdir(grid_dem_headwall_shp_dir) is False:
        io_function.mkdir(grid_dem_headwall_shp_dir)

    # read grids and ids
    time0 = time.time()
    all_grid_polys, all_ids = vector_gpd.read_polygons_attributes_list(
        grid_20_shp, 'id')
    print('time cost of reading polygons and attributes', time.time() - time0)

    # get grid ids based on input extent
    grid_base_name = os.path.splitext(
        os.path.basename(extent_shp_or_ids_txt))[0]
    grid_polys, grid_ids = get_grid_20(extent_shp_or_ids_txt, all_grid_polys,
                                       all_ids)

    # check dem difference existence
    grid_headwall_shps, grid_id_no_headwall_shp = get_existing_grid_headwall_shp(
        grid_dem_headwall_shp_dir, grid_base_name, grid_ids)
    if len(grid_id_no_headwall_shp) > 0:
        # refine grid_polys
        if len(grid_ids) > len(grid_id_no_headwall_shp):
            id_index = [grid_ids.index(id) for id in grid_id_no_headwall_shp]
            grid_polys = [grid_polys[idx] for idx in id_index]

        reg_tifs = io_function.get_file_list_by_ext('.tif',
                                                    arcticDEM_reg_tif_dir,
                                                    bsub_folder=False)
        reg_tifs = [tif for tif in reg_tifs
                    if 'matchtag' not in tif]  # remove matchtag
        #
        headwall_shp_folders = extract_headwall_grids(grid_polys,
                                                      grid_id_no_headwall_shp,
                                                      grid_base_name,
                                                      reg_tifs,
                                                      b_mosaic_id,
                                                      b_mosaic_date,
                                                      keep_dem_percent,
                                                      o_res,
                                                      process_num=process_num)
def save_id_grid_no_subsidence(grid_id):
    # appends grid_id to grid_no_subscidence_poly_txt
    if os.path.isdir(process_log_dir) is False:
        io_function.mkdir(process_log_dir)

    id_list = []
    if os.path.isfile(grid_no_subscidence_poly_txt):
        id_list = io_function.read_list_from_txt(grid_no_subscidence_poly_txt)    # no need to convert to int
    id_str = str(grid_id)
    if id_str in id_list:
        return True
    else:
        # save by adding one line
        with open(grid_no_subscidence_poly_txt,'a') as f_obj:
            f_obj.write(str(grid_id) + '\n')
        return True
def add_id_grid_to_txt(grid_id, txt_path):
    # grid_no_valid_dem_ids.txt
    if os.path.isdir(process_log_dir) is False:
        io_function.mkdir(process_log_dir)
    # update txt file
    id_list = []
    if os.path.isfile(txt_path):
        id_list = io_function.read_list_from_txt(txt_path)    # no need to convert to int
    id_str = str(grid_id)
    if id_str in id_list:
        return True
    else:
        # save by adding one line
        with open(txt_path,'a') as f_obj:
            f_obj.write(str(grid_id) + '\n')
        return True
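save_id_grid_no_subsidence above implements the same read-check-append logic; assuming grid_no_subscidence_poly_txt is in scope, it could simply delegate to this generic helper:

def save_id_grid_no_subsidence(grid_id):
    # same behavior as the standalone version above, without the duplication
    return add_id_grid_to_txt(grid_id, grid_no_subscidence_poly_txt)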
def submit_hillshade_newest_headwall_line_grid_job(ids_list, idx,
                                                   grid_base_name,
                                                   max_job_count):

    wait_if_reach_max_jobs(max_job_count, 'dLi')  # draw Line on hillshade

    job_name = 'dLi%d' % idx
    check_length_jobname(job_name)
    work_dir = working_dir_string(idx,
                                  'hillshade_newest_headwall_line_',
                                  root=root_dir)
    if os.path.isdir(work_dir) is False:
        io_function.mkdir(work_dir)
        os.chdir(work_dir)

        ids_list = [str(item) for item in ids_list]
        io_function.save_list_to_txt(grid_base_name + '.txt', ids_list)

        # prepare job
        sh_list = [
            'hillshade_headwall_line_grid.sh',
            'job_hillshade_headwall_line_grid.sh'
        ]
        copy_curc_job_files(jobsh_dir, work_dir, sh_list)
        slurm_utility.modify_slurm_job_sh(
            'job_hillshade_headwall_line_grid.sh', 'job-name', job_name)
    else:
        os.chdir(work_dir)
        submit_job_names = slurm_utility.get_submited_job_names(curc_username)
        if job_name in submit_job_names:
            print(
                'The folder: %s already exists and the job has been submitted, skip submitting a new job'
                % work_dir)
            return

        # job is completed
        if os.path.isfile('done.txt'):
            print('The job in the folder: %s is Done' % work_dir)
            return

    # submit the job
    # sometimes, when submitting a job, it ends with "singularity: command not found" and exits; weird. Then try submitting the job from an scompile node
    submit_job_curc_or_run_script_local('job_hillshade_headwall_line_grid.sh',
                                        'hillshade_headwall_line_grid.sh')

    os.chdir(curr_dir_before_start)
def copy_original_mapped_polygons(curr_dir_before_ray, work_dir):
    # when ray starts a process, we need to add code_dir again and import user-defined modules
    import basic_src.io_function as io_function
    org_dir = os.path.join(curr_dir_before_ray, 'multi_inf_results')
    save_dir = os.path.join(work_dir, 'multi_inf_results')

    shp_list = io_function.get_file_list_by_pattern(org_dir, '*/*.shp')
    shp_list = [
        item for item in shp_list if 'post' not in os.path.basename(item)
    ]  # remove 'post' ones
    for shp in shp_list:
        area_dir = os.path.join(save_dir,
                                os.path.basename(os.path.dirname(shp)))
        if os.path.isdir(area_dir) is False:
            io_function.mkdir(area_dir)
        dst_path = os.path.join(area_dir, os.path.basename(shp))
        io_function.copy_shape_file(shp, dst_path)
def polygons2geojson(input_shp, save_folder):
    '''
    convert polygons in a shapefile to many geojson files (one for each polygon)
    :param input_shp:
    :param save_folder:
    :return:
    '''
    io_function.is_file_exist(input_shp)
    if os.path.isdir(save_folder) is False:
        io_function.mkdir(save_folder)

    polygons, ids = vector_gpd.read_polygons_attributes_list(input_shp, 'id')
    prj_info = map_projection.get_raster_or_vector_srs_info_epsg(
        input_shp)  # geojson needs EPSG, such as "EPSG:3413"
    # print(prj_info)
    for poly, id in zip(polygons, ids):
        save_one_polygon_2geojson(poly, id, prj_info, save_folder)
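save_one_polygon_2geojson is not shown; a sketch of the GeoJSON it might produce, assuming shapely geometries and a GDAL-style crs member carrying the EPSG string (strict RFC 7946 GeoJSON omits crs):

import json
from shapely.geometry import Polygon, mapping

def polygon_to_geojson_dict(poly, poly_id, epsg_str):
    # one polygon as a single-feature FeatureCollection
    return {
        'type': 'FeatureCollection',
        'crs': {'type': 'name', 'properties': {'name': epsg_str}},
        'features': [{
            'type': 'Feature',
            'properties': {'id': poly_id},
            'geometry': mapping(poly),
        }],
    }

poly = Polygon([(0, 0), (1, 0), (1, 1)])
print(json.dumps(polygon_to_geojson_dict(poly, 1, 'EPSG:3413')))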
def make_note_all_task_done(extent_shp, remote_node):
    if os.path.isdir(grid_ids_txt_dir) is False:
        io_function.mkdir(grid_ids_txt_dir)

    shp_grid_id_txt, log_grid_ids_txt, log_grid_ids_txt_done = get_extent_grid_id_txt_done_files(
        extent_shp)

    # shp_grid_id_txt should be in the current folder
    if os.path.isfile(log_grid_ids_txt) is False:
        io_function.copy_file_to_dst(shp_grid_id_txt, log_grid_ids_txt)

    if os.path.isfile(log_grid_ids_txt_done) is False:
        io_function.save_list_to_txt(log_grid_ids_txt_done, ['Done'])
        # copy the curc
        r_grid_ids_txt_dir = '/scratch/summit/lihu9680/ArcticDEM_tmp_dir/grid_ids_txt'
        scp_communicate.copy_file_folder_to_remote_machine(
            remote_node, r_grid_ids_txt_dir, log_grid_ids_txt_done)
def main():
    basic.setlogfile('log_convert_dem_diff_to8bit.txt')
    if os.path.isdir(grid_dem_diffs_8bit_dir) is False:
        io_function.mkdir(grid_dem_diffs_8bit_dir)

    dem_diff_list = io_function.get_file_list_by_pattern(grid_dem_diffs_dir,'*DEM_diff_grid*.tif')
    count = len(dem_diff_list)
    failed_tifs = []
    for idx, tif in enumerate(dem_diff_list):
        print('%d/%d convert %s to 8 bit'%(idx+1, count, tif))
        tif_8bit = io_function.get_name_by_adding_tail(tif, '8bit')
        output = os.path.join(grid_dem_diffs_8bit_dir, os.path.basename(tif_8bit))
        if dem_tif_to_8bit(tif,output) is False:
            failed_tifs.append(tif)

    if len(failed_tifs)>0:
        io_function.save_list_to_txt('failed_dem_diff_to8bit.txt',failed_tifs)
def test_merge_polygon_for_demDiff_headwall_grids():
    dem_subsidence_shp = os.path.expanduser(
        '~/Data/tmp_data/segment_result_grid9999/ala_north_slo_extent_latlon_grid_ids_DEM_diff_grid9999_8bit_post_final.shp'
    )
    headwall_shp_list = io_function.get_file_list_by_pattern(
        os.path.expanduser('~/Data/tmp_data/headwall_shps_grid9999'), '*.shp')

    # merge the results of these two
    output_dir = os.path.join(grid_dem_subsidence_select,
                              'subsidence_grid%d' % 9999)
    if os.path.isdir(output_dir) is False:
        io_function.mkdir(output_dir)

    merge_polygon_for_demDiff_headwall_grids(dem_subsidence_shp,
                                             headwall_shp_list,
                                             output_dir,
                                             buffer_size=50)
def segment_subsidence_grey_image_v2(dem_diff_grey_8bit, dem_diff, save_dir,process_num, subsidence_thr_m=-0.5, min_area=40, max_area=100000000):
    '''
    segment subsidence areas based on 8bit dem difference
    :param dem_diff_grey_8bit:
    :param dem_diff:
    :param save_dir:
    :param process_num:
    :param subsidence_thr_m: mean value less than this is considered subsidence (in meters)
    :param min_area: min size in m^2 (default is 40 m^2, 10 pixels on ArcticDEM)
    :param max_area: max size in m^2 (default is 10 km by 10 km)
    :return:
    '''

    io_function.is_file_exist(dem_diff_grey_8bit)

    out_pre = os.path.splitext(os.path.basename(dem_diff_grey_8bit))[0]
    segment_shp_path = os.path.join(save_dir, out_pre + '.shp')

    # get initial polygons
    # because the labels from superpixel segmentation are not unique, we may need to get the mean dem diff based on polygons, so set org_raster=None
    label_path_list = segment_a_grey_image(dem_diff_grey_8bit,save_dir,process_num, org_raster=None,b_save_patch_label=True)

    patch_shp_list = polygonize_label_images(label_path_list, org_raster=dem_diff, stats=['mean', 'std', 'count'], prefix='demD',
                            process_num=process_num, b_remove_nodata=True)

    # post-processing for each patch shp
    post_patch_shp_list = []
    for idx, shp in enumerate(patch_shp_list):
        # get DEM diff information for each polygon.
        post_shp = get_dem_subscidence_polygons(shp, dem_diff, dem_diff_thread_m=subsidence_thr_m,
                                     min_area=min_area, max_area=max_area, process_num=1)
        if post_shp is not None:
            post_patch_shp_list.append(post_shp)

    # merge shapefile
    if os.path.isdir(save_dir) is False:
        io_function.mkdir(save_dir)
    vector_gpd.merge_shape_files(post_patch_shp_list,segment_shp_path)

    # post-processing again
    dem_diff_shp = get_dem_subscidence_polygons(segment_shp_path, dem_diff, dem_diff_thread_m=subsidence_thr_m,
                                            min_area=min_area, max_area=max_area, process_num=1)

    basic.outputlogMessage('obtain elevation reduction polygons: %s'%dem_diff_shp)
    return True
class TestdeeplabTrainclass:

    if os.path.isdir('split_images'):
        io_function.delete_file_or_dir('split_images')
    if os.path.isdir('split_labels'):
        io_function.delete_file_or_dir('split_labels')

    io_function.mkdir('split_images')
    io_function.mkdir('split_labels')

    def test_split_a_pair_sub_image_label(self):

        ### split the training image into many small patches (480*480)
        patch_w= 160 # parameters.get_string_parameters(para_file,'train_patch_width')
        patch_h= 160 #parameters.get_string_parameters(para_file,'train_patch_height')

        # notes
        # if the overlay is set to 80, the width or height of patches ranges from 240 to 320,
        # so it will generate more patches than the 160 setting

        # overlay_x= 80 # parameters.get_string_parameters(para_file,'train_pixel_overlay_x')
        # overlay_y= 80 #parameters.get_string_parameters(para_file,'train_pixel_overlay_y')

        overlay_x= 160 # parameters.get_string_parameters(para_file,'train_pixel_overlay_x')
        overlay_y= 160 #parameters.get_string_parameters(para_file,'train_pixel_overlay_y')

        split_image_format= '.png' # parameters.get_string_parameters(para_file,'split_image_format')

        trainImg_dir= 'subImages' #  parameters.get_string_parameters(para_file,'input_train_dir')
        labelImg_dir= 'subLabels' # parameters.get_string_parameters(para_file,'input_label_dir')

        if os.path.isdir(trainImg_dir) is False:
            raise IOError('%s not in the current folder, please get subImages first'%trainImg_dir)
        if os.path.isdir(labelImg_dir) is False:
            raise IOError('%s not in the current folder, please get subImages first'%labelImg_dir)

        # sub_img_label_txt = 'sub_images_labels_list_test.txt'
        sub_img_label_txt = 'sub_images_labels_list_1.txt'
        if os.path.isfile(sub_img_label_txt) is False:
            raise IOError('%s not in the current folder, please get subImages first' % sub_img_label_txt)

        with open(sub_img_label_txt) as txt_obj:
            line_list = [name.strip() for name in txt_obj.readlines()]
            for line in line_list:
                split_sub_images.split_a_pair_sub_image_label(line, patch_w, patch_h, overlay_x, overlay_y, split_image_format)
def test_darknet_batch_detection_rs_images():
    print('\n')
    print('Run test_darknet_batch_detection_rs_images')

    config_file = 'yolov4_obj_oneband.cfg'
    yolo_data = os.path.join('data', 'obj.data')
    weights = os.path.join('exp1', 'yolov4_obj_oneband_best.weights')
    batch_size = 1

    # these three have the same size
    # image_names = ['20200818_mosaic_8bit_rgb_p_1001.png'] #, '20200818_mosaic_8bit_rgb_p_1038.png', '20200818_mosaic_8bit_rgb_p_1131.png']
    image_names = ['Alaska_north_slope_hillshade_20170426_poly_28_p_0.png'] #, '20200818_mosaic_8bit_rgb_p_1038.png', '20200818_mosaic_8bit_rgb_p_1131.png']
    image_names = [os.path.join('debug_img', item) for item in image_names]
    image_path = image_names[0]
    save_dir = './'

    height, width, band_num, data_type = raster_io.get_height_width_bandnum_dtype(image_path)
    # print('input image: height, width, band_num, data_type', height, width, band_num, data_type)

    # divide the image into many small patches, then process them one by one to solve memory issues.
    patch_w = 480
    patch_h = 480
    overlay_x = 160
    overlay_y = 160
    image_patches = split_image.sliding_window(width,height,patch_w,patch_h,adj_overlay_x=overlay_x,adj_overlay_y=overlay_y)
    patch_count = len(image_patches)

    if os.path.isdir(save_dir) is False:
        io_function.mkdir(save_dir)

    # group patches based on size, each patch is (xoff, yoff, xsize, ysize)
    patch_groups = {}
    for patch in image_patches:
        wh_str = 'w%d'%patch[2] + '_' + 'h%d'%patch[3]
        if wh_str in patch_groups.keys():
            patch_groups[wh_str].append(patch)
        else:
            patch_groups[wh_str] = [patch]


    # load network
    network, class_names, class_colors = load_darknet_network(config_file, yolo_data, weights, batch_size=batch_size)

    darknet_batch_detection_rs_images(network, image_path, save_dir, patch_groups, patch_count, class_names, batch_size,
                                      thresh=0.25, hier_thresh=.5, nms=.45)
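The patch-grouping loop in the function above is the classic group-by-key pattern; an equivalent with collections.defaultdict:

from collections import defaultdict

def group_patches_by_size(image_patches):
    # key patches (xoff, yoff, xsize, ysize) by their size, so each darknet
    # batch contains patches of identical dimensions
    patch_groups = defaultdict(list)
    for patch in image_patches:
        patch_groups['w%d_h%d' % (patch[2], patch[3])].append(patch)
    return dict(patch_groups)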
def test_dataloader():
    # test, run in ~/Data/tmp_data/test_mmsegmentation/test_landuse_dl
    para_file = 'main_para.ini'
    set_pythonpath(para_file)
    expr_name = parameters.get_string_parameters(para_file, 'expr_name')
    trained_model = '%s/latest.pth' % expr_name

    # test rgb, using rgb in Willow River
    # img_idx = 0
    # image_path = os.path.expanduser('~/Data/Arctic/canada_arctic/Willow_River/Planet2020/20200818_mosaic_8bit_rgb.tif')
    # img_save_dir = os.path.join('predict_output','I%d' % img_idx)
    # io_function.mkdir(img_save_dir)
    # inf_list_file = os.path.join('predict_output','%d.txt'%img_idx)
    # gpuid = None
    # predict_one_image_mmseg(para_file, image_path, img_save_dir, inf_list_file, gpuid, trained_model)
    #
    # # curr_dir,img_idx, area_save_dir, test_id
    # curr_dir = os.getcwd()
    # inf_results_to_shapefile(curr_dir,img_idx,'predict_output','1')

    ############ test nirGB, using rgb in Willow River
    # img_idx = 1
    # image_path = os.path.expanduser('~/Data/Arctic/canada_arctic/Willow_River/Planet2020/20200818_mosaic_8bit_nirGB.tif')
    # img_save_dir = os.path.join('predict_output','I%d' % img_idx)
    # io_function.mkdir(img_save_dir)
    # inf_list_file = os.path.join('predict_output','%d.txt'%img_idx)
    # gpuid = None
    #
    # predict_one_image_mmseg(para_file, image_path, img_save_dir, inf_list_file, gpuid, trained_model)
    #
    # # curr_dir,img_idx, area_save_dir, test_id
    # curr_dir = os.getcwd()
    # inf_results_to_shapefile(curr_dir,img_idx,'predict_output','1')

    ######### try a tiny image
    img_idx = 2
    image_path = os.path.expanduser(
        '~/Data/test_mmsegmentation/test_landuse_dl/WR_nirGB_sub_images/20200818_mosaic_8bit_nirGB_0_class_1.tif'
    )
    img_save_dir = os.path.join('predict_output', 'I%d' % img_idx)
    io_function.mkdir(img_save_dir)
    inf_list_file = os.path.join('predict_output', '%d.txt' % img_idx)
    gpuid = None
    predict_one_image_mmseg(para_file, image_path, img_save_dir, inf_list_file,
                            gpuid, trained_model)