Example #1
def add_polygon_attributes(input, output, para_file, data_para_file):

    if io_function.is_file_exist(input) is False:
        return False

    # copy the input to the output; later steps modify the copy in place
    if io_function.copy_shape_file(input, output) is False:
        raise IOError('copy shape file %s failed' % input)

    # remove narrow parts of mapped polygons
    polygon_narrow_part_thr = parameters.get_digit_parameters_None_if_absence(
        para_file, 'mapped_polygon_narrow_threshold', 'float')
    # if this threshold is set (not None), try to remove narrow parts of the polygons
    if polygon_narrow_part_thr is not None and polygon_narrow_part_thr > 0:
        # use the buffer operation to remove narrow parts of polygons
        basic.outputlogMessage(
            "start removing narrow parts (thr %.2f) in polygons" %
            (polygon_narrow_part_thr * 2))
        if vector_gpd.remove_narrow_parts_of_polygons_shp_NOmultiPolygon(
                input, output, polygon_narrow_part_thr):
            message = "Finished removing narrow parts (thr %.2f) in polygons and save to %s" % (
                polygon_narrow_part_thr * 2, output)
            basic.outputlogMessage(message)
        # note: if removal fails, the plain copy made above is kept
    else:
        basic.outputlogMessage(
            "warning, mapped_polygon_narrow_threshold is not in the parameter file, skipping removal of narrow parts"
        )

    # calculate area, perimeter of polygons
    if cal_add_area_length_of_polygon(output) is False:
        return False

    # calculate the polygon information
    b_calculate_shape_info = parameters.get_bool_parameters_None_if_absence(
        para_file, 'b_calculate_shape_info')
    if b_calculate_shape_info:
        # remove "_shapeInfo.shp" to make it calculate shape information again
        os.system('rm *_shapeInfo.shp')
        if calculate_gully_information(output) is False:
            return False

    # add topographic attributes to each polygon
    dem_files, slope_files, aspect_files, dem_diff_files = get_topographic_files(
        data_para_file)
    if calculate_polygon_topography(output,
                                    para_file,
                                    dem_files,
                                    slope_files,
                                    aspect_files=aspect_files,
                                    dem_diffs=dem_diff_files) is False:
        basic.outputlogMessage(
            'Warning: calculate information of topography failed')
        # don't return False here; missing topography should not abort this step

    return True
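A minimal usage sketch (not part of the source); the shapefile and .ini names below are hypothetical placeholders.

# hypothetical usage; 'main_para.ini' holds the processing parameters and
# 'area_region1.ini' the region data parameters
if add_polygon_attributes('region1_merged.shp', 'region1_post_NOrm.shp',
                          'main_para.ini', 'area_region1.ini'):
    print('polygon attributes added')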
Example #2
def postProcess(para_file, inf_post_note, b_skip_getshp=False, test_id=None):
    # test_id ties the results back to a specific training experiment

    if os.path.isfile(para_file) is False:
        raise IOError('File %s does not exist in the current folder: %s' %
                      (para_file, os.getcwd()))

    # the test string in 'exe.sh'
    test_note = inf_post_note

    WORK_DIR = os.getcwd()

    SECONDS = time.time()

    expr_name = parameters.get_string_parameters(para_file, 'expr_name')
    network_setting_ini = parameters.get_string_parameters(
        para_file, 'network_setting_ini')

    inf_dir = parameters.get_directory(para_file, 'inf_output_dir')
    if test_id is None:
        test_id = os.path.basename(WORK_DIR) + '_' + expr_name

    # get name of inference areas
    multi_inf_regions = parameters.get_string_list_parameters(
        para_file, 'inference_regions')

    # run post-processing in parallel
    # max_parallel_postProc_task = 8

    backup_dir = os.path.join(WORK_DIR, 'result_backup')
    io_function.mkdir(backup_dir)

    # loop over each group of observations sharing the same area and time
    sub_tasks = []
    same_area_time_inis = group_same_area_time_observations(multi_inf_regions)
    region_eva_reports = {}
    for key in same_area_time_inis.keys():
        multi_observations = same_area_time_inis[key]
        area_name = parameters.get_string_parameters(
            multi_observations[0],
            'area_name')  # they have the same name and time
        area_time = parameters.get_string_parameters(multi_observations[0],
                                                     'area_time')
        merged_shp_list = []
        map_raster_list_2d = [None] * len(multi_observations)
        for area_idx, area_ini in enumerate(multi_observations):
            area_remark = parameters.get_string_parameters(
                area_ini, 'area_remark')
            area_save_dir, shp_pre, _ = get_observation_save_dir_shp_pre(
                inf_dir, area_name, area_time, area_remark, test_id)

            # get image list
            inf_image_dir = parameters.get_directory(area_ini, 'inf_image_dir')
            # it is OK to treat a file name as a pattern; the functions below still return a file list
            inf_image_or_pattern = parameters.get_string_parameters(
                area_ini, 'inf_image_or_pattern')
            inf_img_list = io_function.get_file_list_by_pattern(
                inf_image_dir, inf_image_or_pattern)
            img_count = len(inf_img_list)
            if img_count < 1:
                raise ValueError(
                    'No image for inference, please check inf_image_dir and inf_image_or_pattern in %s'
                    % area_ini)

            merged_shp = os.path.join(WORK_DIR, area_save_dir,
                                      shp_pre + '.shp')
            if b_skip_getshp is False:
                # convert the inference results to shapefiles, image by image
                result_shp_list = []
                map_raster_list = []
                for img_idx, img_path in enumerate(inf_img_list):
                    out_shp, out_raster = inf_results_to_shapefile(
                        WORK_DIR, img_idx, area_save_dir, test_id)
                    if out_shp is None or out_raster is None:
                        continue
                    result_shp_list.append(os.path.join(WORK_DIR, out_shp))
                    map_raster_list.append(out_raster)
                # merge shapefiles
                if merge_shape_files(result_shp_list, merged_shp) is False:
                    continue
                map_raster_list_2d[area_idx] = map_raster_list

            merged_shp_list.append(merged_shp)

        if b_skip_getshp is False:
            # add an occurrence count to each polygon
            get_occurence_for_multi_observation(merged_shp_list)

        for area_idx, area_ini in enumerate(multi_observations):
            area_remark = parameters.get_string_parameters(
                area_ini, 'area_remark')
            area_save_dir, shp_pre, area_remark_time = get_observation_save_dir_shp_pre(
                inf_dir, area_name, area_time, area_remark, test_id)

            merged_shp = os.path.join(WORK_DIR, area_save_dir,
                                      shp_pre + '.shp')
            if os.path.isfile(merged_shp) is False:
                print('Warning, %s does not exist, skipping' % merged_shp)
                continue

            # add attributes to shapefile
            # add_attributes_script = os.path.join(code_dir,'datasets', 'get_polygon_attributes.py')
            shp_attributes = os.path.join(WORK_DIR, area_save_dir,
                                          shp_pre + '_post_NOrm.shp')
            # add_polygon_attributes(add_attributes_script,merged_shp, shp_attributes, para_file, area_ini )
            add_polygon_attributes(merged_shp, shp_attributes, para_file,
                                   area_ini)

            # remove polygons
            # rm_polygon_script = os.path.join(code_dir,'datasets', 'remove_mappedPolygons.py')
            shp_post = os.path.join(WORK_DIR, area_save_dir,
                                    shp_pre + '_post.shp')
            # remove_polygons(rm_polygon_script,shp_attributes, shp_post, para_file)
            remove_polygons_main(shp_attributes, shp_post, para_file)

            # evaluate the mapping results
            # eval_shp_script = os.path.join(code_dir,'datasets', 'evaluation_result.py')
            out_report = os.path.join(WORK_DIR, area_save_dir,
                                      shp_pre + '_evaluation_report.txt')
            # evaluation_polygons(eval_shp_script, shp_post, para_file, area_ini,out_report)
            evaluation_polygons(shp_post, para_file, area_ini, out_report)

            ##### copy and backup files ######
            # copy files to result_backup
            if len(test_note) > 0:
                backup_dir_area = os.path.join(
                    backup_dir, area_name + '_' + area_remark_time + '_' +
                    test_id + '_' + test_note)
            else:
                backup_dir_area = os.path.join(
                    backup_dir,
                    area_name + '_' + area_remark_time + '_' + test_id)
            io_function.mkdir(backup_dir_area)
            if len(test_note) > 0:
                bak_merged_shp = os.path.join(
                    backup_dir_area, '_'.join([shp_pre, test_note]) + '.shp')
                bak_post_shp = os.path.join(
                    backup_dir_area,
                    '_'.join([shp_pre, 'post', test_note]) + '.shp')
                bak_eva_report = os.path.join(
                    backup_dir_area,
                    '_'.join([shp_pre, 'eva_report', test_note]) + '.txt')
                bak_area_ini = os.path.join(
                    backup_dir_area,
                    '_'.join([shp_pre, 'region', test_note]) + '.ini')
            else:
                bak_merged_shp = os.path.join(backup_dir_area,
                                              '_'.join([shp_pre]) + '.shp')
                bak_post_shp = os.path.join(
                    backup_dir_area, '_'.join([shp_pre, 'post']) + '.shp')
                bak_eva_report = os.path.join(
                    backup_dir_area,
                    '_'.join([shp_pre, 'eva_report']) + '.txt')
                bak_area_ini = os.path.join(
                    backup_dir_area, '_'.join([shp_pre, 'region']) + '.ini')

            io_function.copy_shape_file(merged_shp, bak_merged_shp)
            io_function.copy_shape_file(shp_post, bak_post_shp)
            if os.path.isfile(out_report):
                io_function.copy_file_to_dst(out_report,
                                             bak_eva_report,
                                             overwrite=True)
            io_function.copy_file_to_dst(area_ini,
                                         bak_area_ini,
                                         overwrite=True)

            # copy map raster
            b_backup_map_raster = parameters.get_bool_parameters_None_if_absence(
                area_ini, 'b_backup_map_raster')
            if b_backup_map_raster is True:
                if map_raster_list_2d[area_idx] is not None:
                    for map_tif in map_raster_list_2d[area_idx]:
                        bak_map_tif = os.path.join(backup_dir_area,
                                                   os.path.basename(map_tif))
                        io_function.copy_file_to_dst(map_tif,
                                                     bak_map_tif,
                                                     overwrite=True)

            region_eva_reports[shp_pre] = bak_eva_report

    if len(test_note) > 0:
        bak_para_ini = os.path.join(
            backup_dir, '_'.join([test_id, 'para', test_note]) + '.ini')
        bak_network_ini = os.path.join(
            backup_dir, '_'.join([test_id, 'network', test_note]) + '.ini')
        bak_time_cost = os.path.join(
            backup_dir, '_'.join([test_id, 'time_cost', test_note]) + '.txt')
    else:
        bak_para_ini = os.path.join(backup_dir,
                                    '_'.join([test_id, 'para']) + '.ini')
        bak_network_ini = os.path.join(backup_dir,
                                       '_'.join([test_id, 'network']) + '.ini')
        bak_time_cost = os.path.join(backup_dir,
                                     '_'.join([test_id, 'time_cost']) + '.txt')
    io_function.copy_file_to_dst(para_file, bak_para_ini)
    io_function.copy_file_to_dst(network_setting_ini, bak_network_ini)
    if os.path.isfile('time_cost.txt'):
        io_function.copy_file_to_dst('time_cost.txt', bak_time_cost)

    # output the evaluation report to screen
    for key in region_eva_reports.keys():
        report = region_eva_reports[key]
        if os.path.isfile(report) is False:
            continue
        print('evaluation report for %s:' % key)
        os.system('head -n 7 %s' % report)

    # output evaluation report to table
    if len(test_note) > 0:
        out_table = os.path.join(
            backup_dir,
            '_'.join([test_id, 'accuracy_table', test_note]) + '.xlsx')
    else:
        out_table = os.path.join(
            backup_dir, '_'.join([test_id, 'accuracy_table']) + '.xlsx')
    eva_reports = [
        region_eva_reports[key] for key in region_eva_reports
        if os.path.isfile(region_eva_reports[key])
    ]
    eva_report_to_tables.eva_reports_to_table(eva_reports, out_table)

    duration = time.time() - SECONDS
    os.system(
        'echo "$(date): time cost of post-processing: %.2f seconds">>time_cost.txt'
        % duration)
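A sketch of how postProcess might be invoked; the parameter file name and note string are hypothetical placeholders.

# hypothetical usage; assumes 'main_para.ini' is in the working directory and
# inference results already exist under inf_output_dir
postProcess('main_para.ini', 'exp1', b_skip_getshp=False)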
Example #3
def get_sub_images_multi_regions(para_file):

    print(
        "extract sub-images and sub-labels for a given shape file (training polygons)"
    )

    if os.path.isfile(para_file) is False:
        raise IOError('File %s does not exist in the current folder: %s' %
                      (para_file, os.getcwd()))

    get_subImage_script = os.path.join(code_dir, 'datasets',
                                       'get_subImages.py')
    SECONDS = time.time()

    # get name of training areas
    multi_training_regions = parameters.get_string_list_parameters_None_if_absence(
        para_file, 'training_regions')
    if multi_training_regions is None or len(multi_training_regions) < 1:
        raise ValueError('No training area is set in %s' % para_file)

    # multi_training_files = parameters.get_string_parameters_None_if_absence(para_file, 'multi_training_files')

    dstnodata = parameters.get_string_parameters(para_file, 'dst_nodata')
    buffersize = parameters.get_string_parameters(para_file, 'buffer_size')
    rectangle_ext = parameters.get_string_parameters(para_file,
                                                     'b_use_rectangle')
    process_num = parameters.get_digit_parameters(para_file, 'process_num',
                                                  'int')

    b_no_label_image = parameters.get_bool_parameters_None_if_absence(
        para_file, 'b_no_label_image')

    if os.path.isfile('sub_images_labels_list.txt'):
        io_function.delete_file_or_dir('sub_images_labels_list.txt')

    subImage_dir = parameters.get_string_parameters_None_if_absence(
        para_file, 'input_train_dir')
    subLabel_dir = parameters.get_string_parameters_None_if_absence(
        para_file, 'input_label_dir')

    # loop through each training region
    for idx, area_ini in enumerate(multi_training_regions):

        input_image_dir = parameters.get_directory_None_if_absence(
            area_ini, 'input_image_dir')

        # it is OK to treat a file name as a pattern; the functions below still return a file list
        input_image_or_pattern = parameters.get_string_parameters(
            area_ini, 'input_image_or_pattern')

        b_sub_images_json = parameters.get_bool_parameters(
            area_ini, 'b_sub_images_json')
        if b_sub_images_json is True:
            # copy sub-images, then convert json files to label images.
            object_names = parameters.get_string_list_parameters(
                para_file, 'object_names')
            get_subImages_json.get_subimages_label_josn(
                input_image_dir,
                input_image_or_pattern,
                subImage_dir,
                subLabel_dir,
                object_names,
                b_no_label_image=b_no_label_image,
                process_num=process_num)

        else:

            all_train_shp = parameters.get_file_path_parameters_None_if_absence(
                area_ini, 'training_polygons')
            train_shp = parameters.get_string_parameters(
                area_ini, 'training_polygons_sub')

            # get subImage and subLabel for one set of training polygons
            print(
                'extract training data from image folder (%s) and polygons (%s)'
                % (input_image_dir, train_shp))
            if b_no_label_image is True:
                get_subImage_one_shp(get_subImage_script,
                                     all_train_shp,
                                     buffersize,
                                     dstnodata,
                                     rectangle_ext,
                                     train_shp,
                                     input_image_dir,
                                     file_pattern=input_image_or_pattern,
                                     process_num=process_num)
            else:
                get_subImage_subLabel_one_shp(
                    get_subImage_script,
                    all_train_shp,
                    buffersize,
                    dstnodata,
                    rectangle_ext,
                    train_shp,
                    input_image_dir,
                    file_pattern=input_image_or_pattern,
                    process_num=process_num)

    # check for black sub-images, or sub-images that are mostly black (nodata)
    new_sub_image_label_list = []
    delete_sub_image_label_list = []
    subImage_dir_delete = subImage_dir + '_delete'
    subLabel_dir_delete = subLabel_dir + '_delete'
    io_function.mkdir(subImage_dir_delete)
    if b_no_label_image is None or b_no_label_image is False:
        io_function.mkdir(subLabel_dir_delete)
    get_valid_percent_entropy.plot_valid_entropy(subImage_dir)
    with open('sub_images_labels_list.txt', 'r') as f_obj:
        lines = f_obj.readlines()
        for line in lines:
            image_path, label_path = line.strip().split(':')
            # valid_per = raster_io.get_valid_pixel_percentage(image_path)
            valid_per, entropy = raster_io.get_valid_percent_shannon_entropy(
                image_path)  # base=10
            if valid_per > 60 and entropy >= 0.5:
                new_sub_image_label_list.append(line)
            else:
                delete_sub_image_label_list.append(line)
                io_function.movefiletodir(image_path, subImage_dir_delete)
                if os.path.isfile(label_path):
                    io_function.movefiletodir(label_path, subLabel_dir_delete)
    if len(delete_sub_image_label_list) > 0:
        with open('sub_images_labels_list.txt', 'w') as f_obj:
            for line in new_sub_image_label_list:
                f_obj.writelines(line)

    # check whether the numbers of subImages and subLabels match
    if b_no_label_image is None or b_no_label_image is False:
        sub_image_list = io_function.get_file_list_by_pattern(
            subImage_dir, '*.tif')
        sub_label_list = io_function.get_file_list_by_pattern(
            subLabel_dir, '*.tif')
        if len(sub_image_list) != len(sub_label_list):
            raise ValueError(
                'the count of subImage (%d) and subLabel (%d) is different' %
                (len(sub_image_list), len(sub_label_list)))

    # save brief information of sub-images
    height_list = []
    width_list = []
    band_count = 0
    dtype = 'unknown'
    for line in new_sub_image_label_list:
        image_path, label_path = line.strip().split(':')
        height, width, band_count, dtype = raster_io.get_height_width_bandnum_dtype(
            image_path)
        height_list.append(height)
        width_list.append(width)
    # save the info to a file; if the file exists, it will be overwritten
    img_count = len(new_sub_image_label_list)
    with open('sub_images_patches_info.txt', 'w') as f_obj:
        f_obj.writelines('information of sub-images: \n')
        f_obj.writelines('number of sub-images : %d \n' % img_count)
        f_obj.writelines('band count : %d \n' % band_count)
        f_obj.writelines('data type : %s \n' % dtype)
        f_obj.writelines('maximum width and height: %d, %d \n' %
                         (max(width_list), max(height_list)))
        f_obj.writelines('minimum width and height: %d, %d \n' %
                         (min(width_list), min(height_list)))
        f_obj.writelines(
            'mean width and height: %.2f, %.2f \n\n' %
            (sum(width_list) / img_count, sum(height_list) / img_count))

    duration = time.time() - SECONDS
    os.system(
        'echo "$(date): time cost of getting sub images and labels: %.2f seconds">>time_cost.txt'
        % duration)
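A sketch of a typical call; the parameter file name is a hypothetical placeholder.

# hypothetical usage; 'main_para.ini' must define training_regions, dst_nodata,
# buffer_size, b_use_rectangle, and process_num, as read above
get_sub_images_multi_regions('main_para.ini')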
Example #4
def main(options, args):
    input = args[0]
    output = args[1]

    if io_function.is_file_exist(input) is False:
        return False

    data_para_file = options.data_para
    if data_para_file is None:
        data_para_file = options.para_file
    ## remove non-gully polygons
    # output_rm_nonclass = io_function.get_name_by_adding_tail(input, 'rm_nonclass')
    # if remove_nonclass_polygon(input,output_rm_nonclass,field_name='svmclass') is False:
    #     return False

    # merge the touched polygons
    # ouput_merged = io_function.get_name_by_adding_tail(input,'merged')
    # if merge_polygons_in_gully(input,ouput_merged) is False:
    #     return False
    # ouput_merged = input

    # copy the input to the output; later steps modify the copy in place
    if io_function.copy_shape_file(input, output) is False:
        raise IOError('copy shape file %s failed' % input)

    # remove narrow parts of mapped polygons
    # an empty path makes the parameters module fall back to the parameter file
    # registered with it earlier (an assumption based on how it is used here)
    polygon_narrow_part_thr = parameters.get_digit_parameters_None_if_absence(
        '', 'mapped_polygon_narrow_threshold', 'float')
    # if this threshold is set (not None), try to remove narrow parts of the polygons
    if polygon_narrow_part_thr is not None and polygon_narrow_part_thr > 0:
        # use the buffer operation to remove narrow parts of polygons
        basic.outputlogMessage(
            "start removing narrow parts (thr %.2f) in polygons" %
            (polygon_narrow_part_thr * 2))
        if vector_gpd.remove_narrow_parts_of_polygons_shp_NOmultiPolygon(
                input, output, polygon_narrow_part_thr):
            message = "Finished removing narrow parts (thr %.2f) in polygons and save to %s" % (
                polygon_narrow_part_thr * 2, output)
            basic.outputlogMessage(message)
        # note: if removal fails, the plain copy made above is kept
    else:
        basic.outputlogMessage(
            "warning, mapped_polygon_narrow_threshold is not in the parameter file, skipping removal of narrow parts"
        )

    # calculate area, perimeter of polygons
    if cal_add_area_length_of_polygon(output) is False:
        return False

    # calculate the polygon information
    b_calculate_shape_info = parameters.get_bool_parameters_None_if_absence(
        '', 'b_calculate_shape_info')
    if b_calculate_shape_info:
        # remove "_shapeInfo.shp" to make it calculate shape information again
        os.system('rm *_shapeInfo.shp')
        if calculate_gully_information(output) is False:
            return False

    # # remove small and not narrow polygons
    # if options.min_area is None:
    #     basic.outputlogMessage('minimum area is required for remove polygons')
    #     return False
    # area_thr = options.min_area
    #
    # if options.min_ratio is None:
    #     basic.outputlogMessage('minimum ratio of perimeter/area is required for remove polygons')
    #     return False
    # ratio_thr = options.min_ratio

    # if remove_small_round_polygons(ouput_merged,output,area_thr,ratio_thr) is False:
    #     return False

    # add topographic attributes to each polygon
    dem_file, slope_file, aspect_file, dem_diff_file = get_topographic_files(
        data_para_file)
    if calculate_polygon_topography(output,
                                    dem_file,
                                    slope_file,
                                    aspect_file=aspect_file,
                                    dem_diff=dem_diff_file) is False:
        basic.outputlogMessage(
            'Warning: calculate information of topography failed')
        # don't return False here; missing topography should not abort this step

    # add hydrology information
    flow_accum = parameters.get_flow_accumulation()
    if os.path.isfile(flow_accum):
        if calculate_hydrology(output, flow_accum) is False:
            basic.outputlogMessage(
                'Warning: calculate information of hydrology failed')
            # return False  #  don't return
    else:
        basic.outputlogMessage(
            "warning, flow accumulation file does not exist, skipping the calculation of flow accumulation"
        )

    # # evaluation result
    # val_path = parameters.get_validation_shape()
    # if os.path.isfile(val_path):
    #     evaluation_result(output,val_path)
    # else:
    #     basic.outputlogMessage("warning, validation polygon not exist, skip evaluation")

    return True
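This main() reads optparse-style options plus positional input/output paths; a minimal sketch with a hypothetical stand-in for the parsed options:

# hypothetical usage; _Opts mimics the parsed options this function reads
class _Opts:
    para_file = 'main_para.ini'
    data_para = None

main(_Opts(), ['mapped_polygons.shp', 'mapped_polygons_attr.shp'])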
Example #5
def image_label_to_yolo_format(para_file):

    print("Image labels (semantic segmentation) to YOLO object detection")

    if os.path.isfile(para_file) is False:
        raise IOError('File %s does not exist in the current folder: %s' %
                      (para_file, os.getcwd()))

    img_ext = parameters.get_string_parameters_None_if_absence(
        para_file, 'split_image_format')
    proc_num = parameters.get_digit_parameters(para_file, 'process_num', 'int')

    SECONDS = time.time()

    # get image and label paths
    image_list = []
    label_list = []
    with open(os.path.join('list', 'trainval.txt'), 'r') as f_obj:
        lines = [item.strip() for item in f_obj.readlines()]
        for line in lines:
            image_list.append(os.path.join('split_images', line + img_ext))
            label_list.append(os.path.join('split_labels', line + img_ext))

    num_classes_noBG = parameters.get_digit_parameters_None_if_absence(
        para_file, 'NUM_CLASSES_noBG', 'int')
    b_ignore_edge_objects = parameters.get_bool_parameters_None_if_absence(
        para_file, 'b_ignore_edge_objects')
    if b_ignore_edge_objects is None:
        b_ignore_edge_objects = False

    # get boxes
    total_count = len(image_list)
    for idx, (img, label) in enumerate(zip(image_list, label_list)):
        get_yolo_boxes_one_img(idx,
                               total_count,
                               img,
                               label,
                               num_classes_noBG,
                               rm_edge_obj=b_ignore_edge_objects)

    # write obj.data file
    train_sample_txt = parameters.get_string_parameters(
        para_file, 'training_sample_list_txt')
    val_sample_txt = parameters.get_string_parameters(
        para_file, 'validation_sample_list_txt')
    train_img_list = get_image_list('list', train_sample_txt, 'split_images',
                                    img_ext)
    val_img_list = get_image_list('list', val_sample_txt, 'split_images',
                                  img_ext)

    expr_name = parameters.get_string_parameters(para_file, 'expr_name')
    object_names = parameters.get_string_list_parameters(
        para_file, 'object_names')
    io_function.mkdir('data')
    io_function.mkdir(expr_name)

    with open(os.path.join('data', 'obj.data'), 'w') as f_obj:
        f_obj.writelines('classes = %d' % num_classes_noBG + '\n')

        train_txt = os.path.join('data', 'train.txt')
        io_function.save_list_to_txt(train_txt, train_img_list)
        f_obj.writelines('train = %s' % train_txt + '\n')

        val_txt = os.path.join('data', 'val.txt')
        io_function.save_list_to_txt(val_txt, val_img_list)
        f_obj.writelines('valid = %s' % val_txt + '\n')

        obj_name_txt = os.path.join('data', 'obj.names')
        io_function.save_list_to_txt(obj_name_txt, object_names)
        f_obj.writelines('names = %s' % obj_name_txt + '\n')

        f_obj.writelines('backup = %s' % expr_name + '\n')

    duration = time.time() - SECONDS
    os.system(
        'echo "$(date): time cost of converting to yolo format: %.2f seconds">>time_cost.txt'
        % duration)

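A sketch of a typical call; the parameter file name is a hypothetical placeholder.

# hypothetical usage; expects list/trainval.txt, split_images/, and
# split_labels/ under the current working directory
image_label_to_yolo_format('main_para.ini')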
Example #6
def main(options, args):

    # get multi-temporal shapefile list
    para_file = options.para_file
    b_remove = parameters.get_bool_parameters_None_if_absence(
        para_file, 'b_remove_polygons_using_multitemporal_results')
    # exit early if multi-temporal removal is not requested
    if b_remove is None or b_remove is False:
        basic.outputlogMessage(
            'Warning, b_remove_polygons_using_multitemporal_results not set or is NO'
        )
        return True

    shp_dir = args[0]
    file_pattern = args[1]
    polyon_shps_list = io_function.get_file_list_by_pattern(
        shp_dir, file_pattern)
    if len(polyon_shps_list) < 2:
        raise ValueError(
            'Error, less than two shapefiles, cannot conduct multi-polygon analysis'
        )

    # sort polyon_shps_list into time order: I0 to In
    polyon_shps_list.sort(
        key=lambda x: int(re.findall(r'I\d+', os.path.basename(x))[0][1:]))

    # print(polyon_shps_list)
    # sys.exit()

    # check the projection of the shapefiles; they should all match
    new_shp_proj4 = map_projection.get_raster_or_vector_srs_info_proj4(
        polyon_shps_list[0])
    for idx in range(len(polyon_shps_list) - 1):
        shp_proj4 = map_projection.get_raster_or_vector_srs_info_proj4(
            polyon_shps_list[idx + 1])
        if shp_proj4 != new_shp_proj4:
            raise ValueError('error, projection inconsistency between %s and %s' %
                             (new_shp_proj4, shp_proj4))

    import remove_nonActive_thawSlumps
    import polygons_change_analyze

    # polygon change analysis
    polygons_change_analyze.cal_multi_temporal_iou_and_occurrence(
        polyon_shps_list, para_file)

    # remove non active polygons
    remove_nonActive_thawSlumps.remove_non_active_thaw_slumps(
        polyon_shps_list, para_file)

    # back up files and conduct evaluation
    for idx, shp_path in enumerate(polyon_shps_list):

        # evaluation files
        shp_rmTimeiou = io_function.get_name_by_adding_tail(
            shp_path, 'rmTimeiou')
        basic.outputlogMessage('(%d/%d) evaluation of %s' %
                               (idx + 1, len(polyon_shps_list), shp_rmTimeiou))

        # evaluation
        args_list = [
            os.path.join(deeplabRS, 'evaluation_result.py'), '-p', para_file,
            shp_rmTimeiou
        ]
        if basic.exec_command_args_list_one_file(
                args_list, 'evaluation_report.txt') is False:
            return False

        I_idx_str = re.findall(r'I\d+', os.path.basename(shp_rmTimeiou))

        old_eva_report = io_function.get_file_list_by_pattern(
            shp_dir, I_idx_str[0] + '*eva_report*' + '.txt')
        old_eva_report = [
            item for item in old_eva_report if 'rmTimeiou' not in item
        ]

        old_eva_report_name = old_eva_report[0]

        eva_report_name = io_function.get_name_by_adding_tail(
            old_eva_report_name, 'rmTimeiou')
        # io_function.move_file_to_dst(old_eva_report,backup_eva_report)
        # io_function.move_file_to_dst('evaluation_report.txt', old_eva_report)
        io_function.move_file_to_dst('evaluation_report.txt',
                                     eva_report_name,
                                     overwrite=True)

        # back up the shape files (not needed)

    basic.outputlogMessage(
        'Finish removing polygons using multi-temporal mapping results')
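This main() likewise takes optparse-style options plus two positional arguments, a shapefile folder and a filename pattern; a sketch with hypothetical values:

# hypothetical usage; the folder and pattern are placeholders
class _Opts:
    para_file = 'main_para.ini'

main(_Opts(), ['./multi_temporal_results', 'I*_post.shp'])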