def calculate_polygon_velocity(polygons_shp, los_file):
    """

    Args:
        polygons_shp:
        dem_file:

    Returns:

    """
    if io_function.is_file_exist(polygons_shp) is False:
        return False
    operation_obj = shape_opeation()

    # all_touched: bool, optional
    #     Whether to include every raster cell touched by a geometry, or only
    #     those having a center point within the polygon.
    #     defaults to `False`
    #   Since the raster is usually coarser than the polygons, we set all_touched = True
    all_touched = True

    # #DEM
    if io_function.is_file_exist(los_file):
        stats_list = ['mean', 'std']  # ['min', 'max', 'mean', 'count','median','std']
        if operation_obj.add_fields_from_raster(polygons_shp, los_file, "los", band=1, stats_list=stats_list,
                                                all_touched=all_touched) is False:
            return False
    else:
        basic.outputlogMessage("warning, LOS file not exist, skip the calculation of LOS information")

    return True
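
# A minimal, hypothetical usage sketch (not part of the original repo): this is the kind of
# rasterstats.zonal_stats call that shape_opeation.add_fields_from_raster wraps; the file
# names below are placeholders.
from rasterstats import zonal_stats

stats = zonal_stats('polygons.shp', 'los_velocity.tif', band=1,
                    stats=['mean', 'std'], all_touched=True)
print(stats[0]['mean'], stats[0]['std'])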
Example #2
def save_false_positve_and_false_negative(result_shp, val_shp, para_file):
    """
    save false positive and false negative polygons from the mapped polygons based on IoU values
    :param result_shp: result shapefile containing mapped polygons
    :param val_shp: shapefile containing validation polygons
    :param para_file: parameter file that defines the IoU threshold
    :return:
    """

    assert io_function.is_file_exist(result_shp)
    assert io_function.is_file_exist(val_shp)
    basic.outputlogMessage('Input mapping result: %s' % result_shp)
    basic.outputlogMessage('Input ground truth: %s' % val_shp)

    IOU_threshold = parameters.get_IOU_threshold(parafile=para_file)
    basic.outputlogMessage('IOU threshold is: %f' % IOU_threshold)

    # calculate the IoU of each mapped polygon
    IOU_mapped_polygons = vector_features.calculate_IoU_scores(
        result_shp, val_shp)
    save_FP_path = io_function.get_name_by_adding_tail(result_shp, 'FP')
    # set to False: remove polygons whose IoU is greater than the threshold, keeping the false positives
    remove_polygons_based_values(result_shp, IOU_mapped_polygons,
                                 IOU_threshold, False, save_FP_path)
    basic.outputlogMessage('save false positives to %s' % save_FP_path)

    # calculate the IoU of each ground truth polygon (for false negatives)
    IOU_ground_truth = vector_features.calculate_IoU_scores(val_shp, result_shp)
    save_FN_path = io_function.get_name_by_adding_tail(result_shp, 'FN')
    # set to False: remove polygons whose IoU is greater than the threshold, keeping the false negatives
    remove_polygons_based_values(val_shp, IOU_ground_truth, IOU_threshold,
                                 False, save_FN_path)
    basic.outputlogMessage('save false negatives to %s' % save_FN_path)
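
# Hedged sketch of the IoU measure thresholded above (an illustration, not the repo's
# vector_features.calculate_IoU_scores): intersection area over union area of two polygons,
# here two hypothetical rectangles.
from shapely.geometry import box

mapped_poly = box(0, 0, 10, 10)   # hypothetical mapped polygon
truth_poly = box(5, 0, 15, 10)    # hypothetical validation polygon
iou = mapped_poly.intersection(truth_poly).area / mapped_poly.union(truth_poly).area
print(iou)  # ~0.33; mapped polygons with IoU below the threshold are kept as false positives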
Example #3
def main(options, args):

    dem_diff_path = args[0]
    io_function.is_file_exist(dem_diff_path)
    save_dir = options.save_dir
    process_num = options.process_num
    # segment_subsidence_on_dem_diff(dem_diff_path,save_dir)
    b_rm_tmp_files = options.b_remove_tmp_files

    dem_diff_grey_8bit = options.dem_diff_8bit
    if dem_diff_grey_8bit is None:
        dem_diff_grey_8bit  = get_dem_diff_8bit(dem_diff_path)

    if save_dir is None:
        save_dir = get_save_dir(dem_diff_path)

    ele_diff_thr = options.ele_diff_thr
    min_area = options.min_area
    max_area = options.max_area

    segment_subsidence_grey_image(dem_diff_grey_8bit, dem_diff_path, save_dir, process_num, subsidence_thr_m=ele_diff_thr,
                                  min_area=min_area, max_area=max_area,b_rm_files=b_rm_tmp_files)

    # the result is a little worse
    # segment_subsidence_grey_image_v2(dem_diff_grey_8bit, dem_diff_path, save_dir, process_num, subsidence_thr_m=ele_diff_thr,
    #                               min_area=min_area, max_area=max_area)

    pass
Example #4
def main(options, args):
    shp_path = args[0]
    io_function.is_file_exist(shp_path)

    ref_raster = options.reference_raster
    # nodata = options.nodata
    out_dir = options.out_dir
    attribute_name = options.attribute
    burn_value = options.burn_value
    b_burn_edge = options.burn_edge_255

    file_name = os.path.splitext(os.path.basename(shp_path))[0]
    save_path = os.path.join(out_dir, file_name + '_label.tif')
    if os.path.isfile(save_path):
        print('Warning, %s already exists' % save_path)
        return True

    if ref_raster is not None:
        rasterize_polygons_to_ref_raster(ref_raster,
                                         shp_path,
                                         burn_value,
                                         attribute_name,
                                         save_path,
                                         ignore_edge=b_burn_edge)
    else:
        xres = options.pixel_size_x
        yres = options.pixel_size_y
        rasterize_polygons(shp_path, burn_value, attribute_name, xres, yres,
                           save_path)
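
# Hedged sketch (an assumption about the approach, not the repo's rasterize_polygons_to_ref_raster):
# burn polygons into a grid aligned with a reference raster using rasterio and geopandas.
import rasterio
from rasterio import features
import geopandas as gpd

def rasterize_to_ref_sketch(ref_raster, shp_path, burn_value, save_path):
    polygons = gpd.read_file(shp_path)
    with rasterio.open(ref_raster) as ref:
        meta = ref.meta.copy()
        meta.update(count=1, dtype='uint8', nodata=0)
        label = features.rasterize(((geom, burn_value) for geom in polygons.geometry),
                                   out_shape=(ref.height, ref.width),
                                   transform=ref.transform, fill=0, dtype='uint8')
    with rasterio.open(save_path, 'w', **meta) as dst:
        dst.write(label, 1)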
Example #5
    def add_fields_from_raster(self,
                               ori_shp,
                               raster_file,
                               field_name,
                               band=1,
                               stats_list=None,
                               all_touched=False):
        """
        get field value from raster file by using "rasterstats"

        """
        if io_function.is_file_exist(
                ori_shp) is False or io_function.is_file_exist(
                    raster_file) is False:
            return False
        # stats_list = ['min', 'max', 'mean', 'count','median','std']
        if stats_list is None:
            stats_list = ['mean', 'std']

        # band = 1
        stats = zonal_stats(ori_shp,
                            raster_file,
                            band=band,
                            stats=stats_list,
                            all_touched=all_touched)
        #test
        # for tp in stats:
        #     print("mean:",tp["mean"],"std:",tp["std"])

        if self.add_fields_to_shapefile(ori_shp, stats, field_name) is False:
            basic.outputlogMessage('add fields to shape file failed')
            return False

        return True
Example #6
def main(options, args):

    t_polygons_shp = args[0]
    image_folder = args[1]   # folder storing image tiles (many split blocks of a big image)

    b_label_image = options.no_label_image
    process_num = options.process_num

    # check training polygons
    assert io_function.is_file_exist(t_polygons_shp)
    t_polygons_shp_all = options.all_training_polygons
    if t_polygons_shp_all is None:
        basic.outputlogMessage('Warning, the full set of training polygons is not assigned, '
                               'the one in the input argument will be treated as the full set of training polygons')
        t_polygons_shp_all = t_polygons_shp
    else:
        if get_projection_proj4(t_polygons_shp) != get_projection_proj4(t_polygons_shp_all):
            raise ValueError('error, projection inconsistency between %s and %s'%(t_polygons_shp, t_polygons_shp_all))
    assert io_function.is_file_exist(t_polygons_shp_all)

    # get image tile list
    # image_tile_list = io_function.get_file_list_by_ext(options.image_ext, image_folder, bsub_folder=False)
    image_tile_list = io_function.get_file_list_by_pattern(image_folder,options.image_ext)
    if len(image_tile_list) < 1:
        raise IOError('error, failed to get image tiles in folder %s'%image_folder)

    check_projection_rasters(image_tile_list)   # it will raise errors if found problems

    # comment out on June 18, 2021,
    # check_1or3band_8bit(image_tile_list)  # it will raise errors if found problems

    #need to check: the shape file and raster should have the same projection.
    if get_projection_proj4(t_polygons_shp) != get_projection_proj4(image_tile_list[0]):
        raise ValueError('error, the input raster (e.g., %s) and vector (%s) files don\'t have the same projection'%(image_tile_list[0],t_polygons_shp))

    # check these are EPSG:4326 projection
    if get_projection_proj4(t_polygons_shp).strip() == '+proj=longlat +datum=WGS84 +no_defs':
        bufferSize = meters_to_degress_onEarth(options.bufferSize)
    else:
        bufferSize = options.bufferSize

    saved_dir = options.out_dir
    # if os.system('mkdir -p ' + os.path.join(saved_dir,'subImages')) != 0:
    #     sys.exit(1)
    # if os.system('mkdir -p ' + os.path.join(saved_dir,'subLabels')) !=0:
    #     sys.exit(1)
    io_function.mkdir(os.path.join(saved_dir,'subImages'))
    if b_label_image:
        io_function.mkdir(os.path.join(saved_dir,'subLabels'))

    dstnodata = options.dstnodata
    if 'qtb_sentinel2' in image_tile_list[0]:
        # for qtb_sentinel-2 mosaic
        pre_name = '_'.join(os.path.splitext(os.path.basename(image_tile_list[0]))[0].split('_')[:4])
    else:
        pre_name = os.path.splitext(os.path.basename(image_tile_list[0]))[0]
    get_sub_images_and_labels(t_polygons_shp, t_polygons_shp_all, bufferSize, image_tile_list,
                              saved_dir, pre_name, dstnodata, brectangle=options.rectangle, b_label=b_label_image,
                              proc_num=process_num)
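
# Hedged sketch of the meters-to-degrees conversion used above when the data are in EPSG:4326
# (a hypothetical helper, not necessarily identical to meters_to_degress_onEarth): one degree
# along a great circle spans roughly 111.32 km on a spherical Earth.
import math

def meters_to_degrees_sketch(meters, earth_radius_m=6371000.0):
    return meters * 360.0 / (2 * math.pi * earth_radius_m)

# e.g. a 300 m buffer becomes roughly 0.0027 degrees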
Example #7
def main(options, args):

    img_path = args[0]
    io_function.is_file_exist(img_path)
    save_dir = options.save_dir
    segment_changes_on_dem_diff(img_path, save_dir)

    pass
Example #8
def main(options, args):

    input_shp = args[0]
    io_function.is_file_exist(input_shp)

    min_area = options.min_area
    max_area = options.max_area
    refine_dem_reduction_polygons(input_shp, min_area, max_area)
Example #9
def get_ext_shps():
    # to make sure the extent files are in the same order on different machines, we create a list: ArcticDEM_subsets_list.txt
    shps_list_txt = os.path.join(ext_shp_dir, 'ArcticDEM_subsets_list.txt')
    shps_list = io_function.read_list_from_txt(shps_list_txt)
    ext_shps = [os.path.join(ext_shp_dir, item) for item in shps_list]
    # check existence
    for ext in ext_shps:
        io_function.is_file_exist(ext)
    return ext_shps
Example #10
def get_sub_images_pixel_json_files(polygons_shp, image_folder_or_path,
                                    image_pattern, class_names, bufferSize,
                                    dstnodata, saved_dir, b_rectangle,
                                    process_num):

    # check training polygons
    assert io_function.is_file_exist(polygons_shp)

    # get image tile list
    # image_tile_list = io_function.get_file_list_by_ext(options.image_ext, image_folder, bsub_folder=False)
    if os.path.isdir(image_folder_or_path):
        image_tile_list = io_function.get_file_list_by_pattern(
            image_folder_or_path, image_pattern)
    else:
        assert io_function.is_file_exist(image_folder_or_path)
        image_tile_list = [image_folder_or_path]

    if len(image_tile_list) < 1:
        raise IOError('error, failed to get image tiles in folder %s' %
                      image_folder_or_path)

    get_subImages.check_projection_rasters(
        image_tile_list)  # it will raise errors if found problems

    get_subImages.check_1or3band_8bit(
        image_tile_list)  # it will raise errors if found problems

    # need to check: the shape file and raster should have the same projection.
    if get_subImages.get_projection_proj4(
            polygons_shp) != get_subImages.get_projection_proj4(
                image_tile_list[0]):
        raise ValueError(
            'error, the input raster (e.g., %s) and vector (%s) files don\'t have the same projection'
            % (image_tile_list[0], polygons_shp))

    # check these are EPSG:4326 projection
    if get_subImages.get_projection_proj4(
            polygons_shp).strip() == '+proj=longlat +datum=WGS84 +no_defs':
        bufferSize = get_subImages.meters_to_degress_onEarth(bufferSize)

    pre_name = os.path.splitext(os.path.basename(image_tile_list[0]))[0]

    saved_dir = os.path.join(saved_dir, pre_name + '_subImages')
    if os.path.isdir(saved_dir) is False:
        io_function.mkdir(saved_dir)

    get_sub_images_and_json_files(polygons_shp,
                                  class_names,
                                  bufferSize,
                                  image_tile_list,
                                  saved_dir,
                                  pre_name,
                                  dstnodata,
                                  brectangle=b_rectangle,
                                  proc_num=process_num)
Example #11
def create_new_region_defined_parafile(template_para_file,
                                       img_dir,
                                       area_remark=None):
    '''
    create a new region-defined para file. Only the new images are defined (other parameters are unchanged)
    :param template_para_file:
    :param img_dir:
    :param area_remark:
    :return:
    '''
    io_function.is_file_exist(template_para_file)

    dir_base = os.path.basename(img_dir)
    date_strs = re.findall(r'\d{8}', dir_base)
    if len(date_strs) == 1:
        date = date_strs[0]
    else:
        date = 'unknown'

    tail = date if area_remark is None else date + '_' + area_remark
    new_para_file = io_function.get_name_by_adding_tail(template_para_file, tail)
    new_para_file = os.path.basename(new_para_file)  # save to current folder
    if os.path.isfile(new_para_file):
        raise IOError('%s already exists, please check or remove first' %
                      new_para_file)

    # copy the file
    io_function.copy_file_to_dst(template_para_file, new_para_file)

    if area_remark is not None:
        modify_parameter(new_para_file, 'area_remark', area_remark)
    if date != 'unknown':
        modify_parameter(new_para_file, 'area_time', date)

    modify_parameter(new_para_file, 'input_image_dir', img_dir)
    modify_parameter(new_para_file, 'inf_image_dir', img_dir)

    tif_list = io_function.get_file_list_by_ext('.tif',
                                                img_dir,
                                                bsub_folder=False)
    if len(tif_list) < 1:
        raise ValueError('No tif in %s' % img_dir)
    if len(tif_list) == 1:
        modify_parameter(new_para_file, 'input_image_or_pattern',
                         os.path.basename(tif_list[0]))
        modify_parameter(new_para_file, 'inf_image_or_pattern',
                         os.path.basename(tif_list[0]))
    else:
        modify_parameter(new_para_file, 'input_image_or_pattern', '*.tif')
        modify_parameter(new_para_file, 'inf_image_or_pattern', '*.tif')

    print("modified and saved new parameter file: %s " % new_para_file)

    return new_para_file
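
# Hedged sketch of a "name = value" parameter update (an assumption about the para file
# format; the repo's actual modify_parameter may differ).
def modify_parameter_sketch(para_file, name, new_value):
    with open(para_file) as f_obj:
        lines = f_obj.readlines()
    with open(para_file, 'w') as f_obj:
        for line in lines:
            if '=' in line and line.split('=')[0].strip() == name:
                line = '%s = %s\n' % (name, new_value)
            f_obj.write(line)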
Example #12
def add_raster_info_from_bufferArea(polygons_shp, raster_file, raster_name,
                                    b_buffer_size):
    """
    calculate raster information (such as elevation) within a buffer area, then add it to each polygon
    Args:
        polygons_shp: input shapefile
        raster_file:  raster file, should have the same projection as the shapefile
        raster_name: the name of the raster, should be fewer than four letters, will be used as part of the attribute name
        b_buffer_size: the size of the buffer area in meters

    Returns: True if successful, False Otherwise
    """
    if io_function.is_file_exist(polygons_shp) is False:
        return False
    if io_function.is_file_exist(raster_file) is False:
        return False
    operation_obj = shape_opeation()

    ## calculate the topography information from the buffer area
    basic.outputlogMessage(
        "info: calculate the raster information from the buffer area")
    buffer_polygon_shp = io_function.get_name_by_adding_tail(
        polygons_shp, 'buffer')
    # if os.path.isfile(buffer_polygon_shp) is False:
    if vector_features.get_buffer_polygons(polygons_shp, buffer_polygon_shp,
                                           b_buffer_size) is False:
        raise IOError("error, failed in producing the buffer_polygon_shp")

    # replace the polygon shape file
    polygons_shp_backup = polygons_shp
    polygons_shp = buffer_polygon_shp

    # all_touched: bool, optional
    #     Whether to include every raster cell touched by a geometry, or only
    #     those having a center point within the polygon.
    #     defaults to `False`
    #   Since the raster is usually coarser than the polygons, we set all_touched = True
    all_touched = True

    stats_list = ['min', 'max', 'mean',
                  'std']  #['min', 'max', 'mean', 'count','median','std']
    if operation_obj.add_fields_from_raster(polygons_shp,
                                            raster_file,
                                            raster_name,
                                            band=1,
                                            stats_list=stats_list,
                                            all_touched=all_touched) is False:
        return False

    # copy the information to the original shape file
    operation_obj.add_fields_shape(polygons_shp_backup, buffer_polygon_shp,
                                   polygons_shp_backup)

    return True
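
# Hedged sketch (not the repo's vector_features.get_buffer_polygons): buffering polygons with
# geopandas before sampling a coarser raster, assuming the shapefile uses a projected CRS in meters.
import geopandas as gpd

def get_buffer_polygons_sketch(in_shp, out_shp, buffer_meters):
    gdf = gpd.read_file(in_shp)
    gdf['geometry'] = gdf.geometry.buffer(buffer_meters)
    gdf.to_file(out_shp)
    return True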
Example #13
def main(options, args):

    img_path = args[0]
    io_function.is_file_exist(img_path)
    save_dir = options.save_dir
    process_num = options.process_num
    org_elevation_diff = options.elevation_diff

    segment_a_grey_image(img_path,
                         save_dir,
                         process_num,
                         org_raster=org_elevation_diff)
Example #14
def get_tile_min_overlap(raster_file_or_files):
    if isinstance(raster_file_or_files,str):
        io_function.is_file_exist(raster_file_or_files)
        image_tiles = [raster_file_or_files]
    elif isinstance(raster_file_or_files,list):
        image_tiles = raster_file_or_files
    else:
        raise ValueError('unsupported type for %s'%str(raster_file_or_files))

    xres, yres = raster_io.get_xres_yres_file(image_tiles[0])
    tile_min_overlap = abs(xres * yres)
    return tile_min_overlap
Example #15
    def read_training_pixels_inside_polygons(self, img_path, shp_path):
        '''
        read pixels of an image within the extent of polygons
        :param img_path: the path of an image
        :param shp_path: the path of shape file
        :return:
        '''
        if io_function.is_file_exist(
                img_path) is False or io_function.is_file_exist(
                    shp_path) is False:
            return False

        no_data = 255  # consider changing to other values
        touch = False  # we only read the pixels inside the polygons, so set all_touched as False
        sub_images, class_labels = build_RS_data.read_pixels_inside_polygons(
            img_path, shp_path, mask_no_data=no_data, touch=touch)

        # read them one by one
        Xs, ys = [], []
        for idx, (img_data, label) in enumerate(zip(sub_images, class_labels)):
            # img: 3d array (nband, height, width)
            # label: int values

            # print(img_data)
            # print(label)
            X_arr = img_data.reshape(img_data.shape[0], -1)

            # remove non-data pixels
            valid_pixels = np.any(X_arr != no_data, axis=0)
            X_arr = X_arr[:, valid_pixels]
            valid_pixel_count = int(X_arr.size / img_data.shape[0])
            # print('pixel count',valid_pixel_count)
            if valid_pixel_count < 1:
                basic.outputlogMessage(
                    'Warning, no valid pixel in the %d th polygon due to its small size'
                    % idx)
                continue

            y_arr = np.ones(X_arr.shape[1]) * label
            Xs.append(X_arr)
            ys.append(y_arr)

        X_pixels = np.concatenate(Xs, axis=1)
        y_pixels = np.concatenate(ys, axis=0)
        X_pixels = np.transpose(X_pixels, (1, 0))
        basic.outputlogMessage(str(X_pixels.shape))
        basic.outputlogMessage(str(y_pixels.shape))

        return X_pixels, y_pixels
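
# Small numpy sketch of the reshape-and-filter pattern above: flatten a (nband, height, width)
# patch to (nband, n_pixels), drop nodata columns, then transpose to (n_pixels, nband) for
# scikit-learn style training. The array here is synthetic.
import numpy as np

no_data = 255
patch = np.random.randint(0, 255, size=(3, 4, 5)).astype(np.uint8)  # fake (nband, h, w) patch
X = patch.reshape(patch.shape[0], -1)       # (3, 20)
valid = np.any(X != no_data, axis=0)        # columns where at least one band is valid
X = X[:, valid].T                           # (n_valid_pixels, 3)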
Example #16
def read_training_pixels(image_path, label_path):
    """
    read training pixels from image and the corresponding label
    :param image_path:
    :param label_path:
    :return: X,y array or False
    """

    if io_function.is_file_exist(
            image_path) is False or io_function.is_file_exist(
                label_path) is False:
        return False

    # check: they are from the same polygons
    polygon_index_img = os.path.basename(image_path).split('_')[-3]
    polygon_index_label = os.path.basename(label_path).split('_')[-3]
    if polygon_index_img != polygon_index_label:
        raise ValueError("%s and %s are not from the same training polygons" %
                         (image_path, label_path))

    with rasterio.open(image_path) as img_obj:
        # read all the bands
        indexes = img_obj.indexes
        nbands = len(indexes)
        img_data = img_obj.read(indexes)

    with rasterio.open(label_path) as img_obj:
        # read all the bands (should only have one band)
        indexes = img_obj.indexes
        if len(indexes) != 1:
            raise ValueError('error, the label should only have one band')

        label_data = img_obj.read(indexes)

    # check the size
    # print(img_data.shape)
    # print(label_data.shape)
    if img_data.shape[1] != label_data.shape[1] or img_data.shape[
            2] != label_data.shape[2]:
        raise ValueError('the image and label have different size')

    X_arr = img_data.reshape(nbands, -1)
    y_arr = label_data.reshape(-1)

    basic.outputlogMessage(str(X_arr.shape))
    basic.outputlogMessage(str(y_arr.shape))
    # sys.exit(1)

    return X_arr, y_arr
Example #17
def calculate_center_latlon(input_vector, save_path, b_save2shp=False):
    io_function.is_file_exist(input_vector)
    epsg_info = map_projection.get_raster_or_vector_srs_info(
        input_vector, 'epsg')
    epsg_int = int(epsg_info.split(':')[1])

    polygons = vector_gpd.read_polygons_gpd(input_vector,
                                            b_fix_invalid_polygon=False)
    poly_center = [vector_gpd.get_polygon_centroid(item) for item in polygons]

    # to list for the input
    x = [item.x for item in poly_center]
    y = [item.y for item in poly_center]
    # x and y are changed in place
    # print(x[0],y[0])
    # print(os.getenv('PATH'))
    if map_projection.convert_points_coordinate_epsg(x, y, epsg_int,
                                                     4326):  # to 'EPSG:4326'
        # print(x[0], y[0])
        pass
    else:
        raise ValueError('error in convert coordinates')

    # save to file
    save_lines = ['%s,%s\n' % (str(xx), str(yy)) for xx, yy in zip(x, y)]
    with open(save_path, 'w') as f_obj:
        f_obj.writelines(save_lines)
        basic.outputlogMessage(
            'saved latitude and longitude of polygons to %s' % save_path)

    ext_save_path = io_function.get_name_by_adding_tail(save_path, 'ext')
    delta = map_projection.meters_to_degrees_onEarth(
        1500)  #calculate distance in degree
    with open(ext_save_path, 'w') as f_obj:
        for xx, yy in zip(x, y):
            left_x = xx - delta
            right_x = xx + delta
            up_yy = yy + delta
            down_yy = yy - delta
            f_obj.writelines('%f,%f,%f,%f\n' %
                             (left_x, down_yy, right_x, up_yy))

    # write the value to shapefile
    attributes = {'centerLat': x, 'centerLon': y}
    if b_save2shp:
        vector_gpd.add_attributes_to_shp(input_vector, attributes)
        basic.outputlogMessage('saved polygons latitude and longitude to %s' %
                               input_vector)
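
# Hedged sketch of the coordinate conversion step (not the repo's
# map_projection.convert_points_coordinate_epsg): reproject centroid x/y lists to EPSG:4326
# with pyproj.
from pyproj import Transformer

def to_lonlat_sketch(xs, ys, src_epsg):
    transformer = Transformer.from_crs('EPSG:%d' % src_epsg, 'EPSG:4326', always_xy=True)
    lons, lats = transformer.transform(xs, ys)
    return list(lons), list(lats)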
Example #18
def add_raster_info_insidePolygons(polygons_shp, raster_file, raster_name):
    """
    calculate raster statistics (e.g., min, max, mean, median, std) inside each polygon, then add them as attributes
    Args:
        polygons_shp:  input shapefile
        raster_file: the raster file to sample
        raster_name: the name of the raster, will be used as part of the attribute name

    Returns: True if successful, False Otherwise

    """
    if io_function.is_file_exist(polygons_shp) is False:
        return False
    operation_obj = shape_opeation()

    # all_touched: bool, optional
    #     Whether to include every raster cell touched by a geometry, or only
    #     those having a center point within the polygon.
    #     defaults to `False`
    #   Since the raster is usually coarser than the polygons, we set all_touched = True
    all_touched = True

    # #DEM

    stats_list = ['min', 'max', 'mean', 'median',
                  'std']  # ['min', 'max', 'mean', 'count','median','std']
    if operation_obj.add_fields_from_raster(polygons_shp,
                                            raster_file,
                                            raster_name,
                                            band=1,
                                            stats_list=stats_list,
                                            all_touched=all_touched) is False:
        return False

    return True
Example #19
    def get_polygon_shape_info(self, input_shp, out_box, bupdate=False):
        """
        get Oriented minimum bounding box for a polygon shapefile,
        and update the shape information based on oriented minimum bounding box to
        the input shape file
        :param input_shp: input polygon shape file
        :param out_box: output Oriented minimum bounding box shape file
        :param bupdate: indicate whether update the original input shapefile
        :return: True if successful, False Otherwise
        """
        if io_function.is_file_exist(input_shp) is False:
            return False

        if self.__qgis_app is None:
            try:
                self.initQGIS()
            except:
                basic.outputlogMessage("initial QGIS error")
                self.__qgis_app = None
                return False

        processing.runalg("qgis:orientedminimumboundingbox", input_shp, True,
                          out_box)

        if os.path.isfile(out_box) is False:
            basic.outputlogMessage(
                "error: result file does not exist, getting orientedminimumboundingbox failed"
            )
            return False

        #update shape info to input shape file
        if bupdate is True:
            pass
Example #20
def main(options, args):

    shape_file = args[0]
    if io_function.is_file_exist(shape_file) is False:
        return False

    # get ground truth polygons
    val_path = parameters.get_validation_shape()  # ground truth

    input_shp = shape_file
    ground_truth_shp = val_path
    basic.outputlogMessage('result shape: %s' % input_shp)
    basic.outputlogMessage('ground truth shape: %s' % ground_truth_shp)
    # calculate the IoU of each predicted polygon
    iou_pre = np.array(get_iou_scores(input_shp, ground_truth_shp))

    # calculate the IoU of each ground truth polygon, for false negatives
    iou_GT = np.array(get_iou_scores(ground_truth_shp, input_shp))

    iou_thr = options.iou_threshold
    basic.outputlogMessage('iou_threshold: %f' % iou_thr)
    precision, recall, f1score = calculate_precision_recall_iou(
        iou_pre, iou_GT, iou_thr)
    basic.outputlogMessage("precision, recall, f1score: %f,%f,%f" %
                           (precision, recall, f1score))
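
# Hedged sketch of how precision, recall and F1 can be derived from the two IoU arrays
# (an assumption; the repo's calculate_precision_recall_iou may differ): predictions with
# IoU >= threshold count as true positives, ground truths below the threshold as false negatives.
import numpy as np

def precision_recall_f1_sketch(iou_pred, iou_gt, iou_thr):
    tp = int(np.count_nonzero(iou_pred >= iou_thr))
    fp = iou_pred.size - tp
    fn = int(np.count_nonzero(iou_gt < iou_thr))
    precision = tp / (tp + fp) if (tp + fp) > 0 else 0.0
    recall = tp / (tp + fn) if (tp + fn) > 0 else 0.0
    f1 = 2 * precision * recall / (precision + recall) if (precision + recall) > 0 else 0.0
    return precision, recall, f1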
Example #21
def main(options, args):
    input_shp = args[0]
    # manual selection, could be files in a folder or ids in a table
    manual_sel = args[1]
    io_function.is_file_exist(input_shp)

    save_path = options.save_path
    if save_path is None:
        save_path = io_function.get_name_by_adding_tail(input_shp,'manuSelect')

    if manual_sel.endswith('.xlsx'):
        select_polygons_by_ids_in_excel(input_shp,manual_sel,save_path)
    elif os.path.isdir(manual_sel):
        select_polygons_by_ids_in_filenames(input_shp,manual_sel,save_path)
    else:
        print('unknown input of manual selection')
Example #22
def get_hisogram_of_oneband_raster(image_path):
    if io_function.is_file_exist(image_path) is False:
        return False

    CommandString = 'gdalinfo -json -hist -mm ' + image_path
    imginfo = basic.exec_command_string_output_string(CommandString)
    if imginfo is False:
        return False
    imginfo_obj = json.loads(imginfo)

    try:
        bands_info = imginfo_obj['bands']
        band_info = bands_info[0]   # only consider band one (assuming there is only one band)
        histogram_info = band_info["histogram"]

        hist_count = histogram_info["count"]
        hist_min = histogram_info["min"]
        hist_max = histogram_info["max"]
        hist_buckets = histogram_info["buckets"]
        return (hist_count,hist_min,hist_max,hist_buckets)

    except KeyError as e:
        basic.outputlogMessage(str(e))
    return (False, False, False, False)
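
# Hedged alternative sketch: the same single-band histogram computed with rasterio + numpy
# instead of parsing `gdalinfo -json -hist` output (an illustration, not the repo's approach).
import rasterio
import numpy as np

def band_histogram_sketch(image_path, buckets=256):
    with rasterio.open(image_path) as src:
        data = src.read(1, masked=True).compressed()   # band 1 with nodata removed
    counts, edges = np.histogram(data, bins=buckets)
    return data.size, float(edges[0]), float(edges[-1]), counts.tolist()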
Example #23
    def has_field(self, input_shp, field_name):
        """
        inquires whether the shape file contains the specific field given by the field name
        :param input_shp: shape file path
        :param field_name: the name of the specific field
        :return: True if exist, False otherwise
        """
        if io_function.is_file_exist(input_shp) is False:
            return False
        try:
            org_obj = shapefile.Reader(input_shp)
        except IOError as e:
            basic.outputlogMessage(str(e))
            return False
        all_fields = org_obj.fields
        field_len = len(all_fields)

        for t_index in range(0, field_len):
            t_field = all_fields[t_index]
            if isinstance(t_field, tuple):
                # t_index += 1  occur once
                continue
            if field_name == t_field[0]:
                return True  #find the specific field of the given name

        return False
Example #24
    def remove_nonclass_polygon(self, shape_file, out_shp, class_field_name):
        """
        remove polygons that do not belong to the targeted class, i.e., the value of class_field_name is 0
        :param shape_file: input shapefile containing all the polygons
        :param out_shp: output shapefile
        :param class_field_name: the name of class field, such as svmclass, treeclass
        :return: True if successful, False Otherwise
        """
        if io_function.is_file_exist(shape_file) is False:
            return False

        try:
            org_obj = shapefile.Reader(shape_file)
        except Exception as e:
            basic.outputlogMessage(str(e))
            return False

        # Create a new shapefile in memory
        w = shapefile.Writer()
        w.shapeType = org_obj.shapeType

        org_records = org_obj.records()
        if (len(org_records) < 1):
            basic.outputlogMessage('error, no record in shape file ')
            return False

        # Copy over the geometry without any changes
        w.fields = list(org_obj.fields)
        field_index = self.__find_field_index(w.fields, class_field_name)
        if field_index is False:
            return False
        shapes_list = org_obj.shapes()
        org_shape_count = len(shapes_list)
        i = 0
        removed_count = 0
        for i in range(0, len(shapes_list)):
            rec = org_records[i]
            if rec[field_index] == 0:  # remove the record which class is 0, 0 means non-gully
                removed_count = removed_count + 1
                continue

            w._shapes.append(shapes_list[i])
            rec = org_records[i]
            w.records.append(rec)

        basic.outputlogMessage('Remove non-class polygon, total count: %d' %
                               removed_count)
        # w._shapes.extend(org_obj.shapes())
        if removed_count == org_shape_count:
            basic.outputlogMessage('error: already remove all the polygons')
            return False

        # copy prj file
        org_prj = os.path.splitext(shape_file)[0] + ".prj"
        out_prj = os.path.splitext(out_shp)[0] + ".prj"
        io_function.copy_file_to_dst(org_prj, out_prj, overwrite=True)

        w.save(out_shp)
        return True
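
# Hedged sketch of the same filtering with geopandas (a simpler alternative, not the
# pyshp-based implementation above).
import geopandas as gpd

def remove_nonclass_sketch(shape_file, out_shp, class_field_name):
    gdf = gpd.read_file(shape_file)
    kept = gdf[gdf[class_field_name] != 0]   # drop records whose class value is 0
    if len(kept) < 1:
        return False
    kept.to_file(out_shp)
    return True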
Example #25
def polygons2geojson(input_shp, save_folder):
    '''
    convert polygons in a shapefile to many GeoJSON files (one for each polygon)
    :param input_shp:
    :param save_folder:
    :return:
    '''
    io_function.is_file_exist(input_shp)
    if os.path.isdir(save_folder) is False:
        io_function.mkdir(save_folder)

    polygons, ids = vector_gpd.read_polygons_attributes_list(input_shp, 'id')
    prj_info = map_projection.get_raster_or_vector_srs_info_epsg(
        input_shp)  # geojson need EPSG, such as "EPSG:3413"
    # print(prj_info)
    for poly, id in zip(polygons, ids):
        save_one_polygon_2geojson(poly, id, prj_info, save_folder)
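
# Hedged sketch (not the repo's save_one_polygon_2geojson): writing one polygon to a GeoJSON
# file with its EPSG code using geopandas; the output name is hypothetical.
import os
import geopandas as gpd

def save_one_polygon_sketch(polygon, poly_id, epsg_str, save_folder):
    gdf = gpd.GeoDataFrame({'id': [poly_id]}, geometry=[polygon], crs=epsg_str)
    gdf.to_file(os.path.join(save_folder, 'poly_%d.geojson' % poly_id), driver='GeoJSON')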
Example #26
def convert_gcp_format(ref_image,warp_image,dem_file,pts_files,output=None):
    '''
    convert the ground control points to ASP format
    :param ref_image: the reference image on which the ground control points were selected
    :param warp_image: the input image that needs co-registration or orthorectification
    :param dem_file: the DEM file
    :param pts_files: ground control points produced by ImageMatchsiftGPU (ENVI format)
    :return: the path of the new ground control points if successful, otherwise None
    '''
    # check file
    assert io_function.is_file_exist(ref_image)
    assert io_function.is_file_exist(warp_image)
    assert io_function.is_file_exist(pts_files)
    assert io_function.is_file_exist(dem_file)

    # read pts file
    # tie_points (x1,y1,x2,y2): (x1,y1) and (x2,y2) are column and row on the base and warp image
    base_file, warp_file, tie_points = read_envi_pts(pts_files)

    # check the reference and warp image
    if base_file not in ref_image:
        raise ValueError('error, the reference image: %s in the pts file is not the same as the input:%s'
                          % (base_file,ref_image))
    if warp_file not in warp_image:
        raise ValueError('error, the warp image: %s in the pts file is not the same as the input:%s'
                            % (warp_file, warp_image))

    # get latitude, longitude from base image

    lon_lat_list = [map_projection.convert_pixel_xy_to_lat_lon(x1,y1,ref_image) for [x1,y1,_,_] in tie_points]

    print(lon_lat_list)
    # get elevation
    # the DEM (SRTM) has already been converted from geoid-based (EGM96) to ellipsoid-based (WGS84) heights
    ele_list = [RSImage.get_image_location_value(dem_file,lon,lat,'lon_lat_wgs84',1)
                for (lon,lat) in lon_lat_list ]

    # save to file
    x2_list = [x2 for [_,_,x2,_] in tie_points]
    y2_list = [y2 for [_, _, _, y2] in tie_points]
    with open(output,'w') as fw:
        for idx, ((lon,lat),ele,x2,y2) in enumerate(zip(lon_lat_list,ele_list,x2_list, y2_list)):

            fw.writelines('%d %.6lf %.6lf %.6lf %.2lf %.2lf %.2lf %s %.2lf %.2lf %.2lf %.2lf \n'%
                          (idx, lat, lon, ele, 1.0,1.0, 1.0, warp_image, x2, y2, 1.0, 1.0))
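
# Hedged sketch of the pixel-to-longitude/latitude step (not the repo's
# map_projection.convert_pixel_xy_to_lat_lon): rasterio affine transform plus pyproj.
import rasterio
from pyproj import Transformer

def pixel_to_lonlat_sketch(col, row, image_path):
    with rasterio.open(image_path) as src:
        x, y = src.transform * (col + 0.5, row + 0.5)   # pixel centre in map coordinates
        to_wgs84 = Transformer.from_crs(src.crs.to_wkt(), 'EPSG:4326', always_xy=True)
    return to_wgs84.transform(x, y)                      # (lon, lat)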
Example #27
def segment_subsidence_grey_image_v2(dem_diff_grey_8bit, dem_diff, save_dir,process_num, subsidence_thr_m=-0.5, min_area=40, max_area=100000000):
    '''
    segment subsidence areas based on 8bit dem difference
    :param dem_diff_grey_8bit:
    :param dem_diff:
    :param save_dir:
    :param process_num:
    :param subsidence_thr_m: polygons with a mean value less than this are considered subsidence (in meters)
    :param min_area: min size in m^2 (default is 40 m^2, 10 pixels on ArcticDEM)
    :param max_area: max size in m^2 (default is 10 km by 10 km)
    :return:
    '''

    io_function.is_file_exist(dem_diff_grey_8bit)

    out_pre = os.path.splitext(os.path.basename(dem_diff_grey_8bit))[0]
    segment_shp_path = os.path.join(save_dir, out_pre + '.shp')

    # get initial polygons
    # because the labels from superpixel segmentation are not unique, we need to get the mean DEM diff based on polygons, so set org_raster=None
    label_path_list = segment_a_grey_image(dem_diff_grey_8bit,save_dir,process_num, org_raster=None,b_save_patch_label=True)

    patch_shp_list = polygonize_label_images(label_path_list, org_raster=dem_diff, stats=['mean', 'std', 'count'], prefix='demD',
                            process_num=process_num, b_remove_nodata=True)

    # post-processing for each patch shp
    post_patch_shp_list = []
    for idx, shp in enumerate(patch_shp_list):
        # get DEM diff information for each polygon.
        post_shp = get_dem_subscidence_polygons(shp, dem_diff, dem_diff_thread_m=subsidence_thr_m,
                                     min_area=min_area, max_area=max_area, process_num=1)
        if post_shp is not None:
            post_patch_shp_list.append(post_shp)

    # merge shapefile
    if os.path.isdir(save_dir) is False:
        io_function.mkdir(save_dir)
    vector_gpd.merge_shape_files(post_patch_shp_list,segment_shp_path)

    # post-processing again
    dem_diff_shp = get_dem_subscidence_polygons(segment_shp_path, dem_diff, dem_diff_thread_m=subsidence_thr_m,
                                            min_area=min_area, max_area=max_area, process_num=1)

    basic.outputlogMessage('obtain elevation reduction polygons: %s'%dem_diff_shp)
    return True
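
# Hedged sketch of the polygonize step (an illustration, not the repo's
# polygonize_label_images): rasterio.features.shapes turns a label image into
# (polygon, label) pairs, skipping the background value 0.
import rasterio
from rasterio import features
from shapely.geometry import shape

def polygonize_labels_sketch(label_path):
    with rasterio.open(label_path) as src:
        labels = src.read(1).astype('int32')   # shapes() needs int16/int32/uint8/uint16/float32
        pairs = [(shape(geom), int(val)) for geom, val in
                 features.shapes(labels, transform=src.transform) if val != 0]
    return pairs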
Example #28
def add_polygon_attributes(input, output, para_file, data_para_file):

    if io_function.is_file_exist(input) is False:
        return False

    # copy output
    if io_function.copy_shape_file(input, output) is False:
        raise IOError('copy shape file %s failed' % input)

    # remove narrow parts of mapped polygons
    polygon_narrow_part_thr = parameters.get_digit_parameters_None_if_absence(
        para_file, 'mapped_polygon_narrow_threshold', 'float')
    #  if it is not None, then it will try to remove narrow parts of polygons
    if polygon_narrow_part_thr is not None and polygon_narrow_part_thr > 0:
        # use the buffer operation to remove narrow parts of polygons
        basic.outputlogMessage(
            "start removing narrow parts (thr %.2f) in polygons" %
            (polygon_narrow_part_thr * 2))
        if vector_gpd.remove_narrow_parts_of_polygons_shp_NOmultiPolygon(
                input, output, polygon_narrow_part_thr):
            message = "Finished removing narrow parts (thr %.2f) in polygons and save to %s" % (
                polygon_narrow_part_thr * 2, output)
            basic.outputlogMessage(message)
        else:
            pass
    else:
        basic.outputlogMessage(
            "warning, mapped_polygon_narrow_threshold is not in the parameter file, skip removing narrow parts"
        )

    # calculate area, perimeter of polygons
    if cal_add_area_length_of_polygon(output) is False:
        return False

    # calculate the polygon information
    b_calculate_shape_info = parameters.get_bool_parameters_None_if_absence(
        para_file, 'b_calculate_shape_info')
    if b_calculate_shape_info:
        # remove "_shapeInfo.shp" to make it calculate shape information again
        os.system('rm *_shapeInfo.shp')
        if calculate_gully_information(output) is False:
            return False

    # add topography of each polygons
    dem_files, slope_files, aspect_files, dem_diff_files = get_topographic_files(
        data_para_file)
    if calculate_polygon_topography(output,
                                    para_file,
                                    dem_files,
                                    slope_files,
                                    aspect_files=aspect_files,
                                    dem_diffs=dem_diff_files) is False:
        basic.outputlogMessage(
            'Warning: calculate information of topography failed')
        # return False   #  don't return

    return True
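
# Hedged sketch of the "remove narrow parts via buffering" idea mentioned above (an assumption
# about the approach, not the repo's remove_narrow_parts_of_polygons_shp_NOmultiPolygon):
# shrinking a polygon by the threshold and growing it back removes parts narrower than twice
# the threshold.
def remove_narrow_parts_sketch(polygon, narrow_thr):
    # polygon: a shapely Polygon in a projected CRS (meters)
    return polygon.buffer(-narrow_thr).buffer(narrow_thr)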
Example #29
def main(options, args):
    img_path = args[0]
    save_path = args[1]
    io_function.is_file_exist(img_path)
    if os.path.isfile(save_path):
        print('%s exists, remove it if you want to re-generate it' % save_path)
        return

    img_np_allbands, src_nodata = raster_io.read_raster_all_bands_np(img_path)
    if options.src_nodata is not None:
        src_nodata = options.src_nodata

    scales = options.scale
    if scales is not None:
        print('input scale (src_min src_max dst_min dst_max): ' + str(scales))
        img_array_8bit = raster_io.image_numpy_allBands_to_8bit(
            img_np_allbands,
            scales,
            src_nodata=src_nodata,
            dst_nodata=options.dst_nodata)
    else:
        min_percent = options.hist_min_percent
        max_percent = options.hist_max_percent
        min_max_value = options.min_max_value
        img_array_8bit = raster_io.image_numpy_allBands_to_8bit_hist(
            img_np_allbands,
            min_max_value,
            per_min=min_percent,
            per_max=max_percent,
            src_nodata=src_nodata,
            dst_nodata=options.dst_nodata)

    # save to file
    if options.dst_nodata is None:
        nodata = src_nodata
    else:
        nodata = options.dst_nodata
    return raster_io.save_numpy_array_to_rasterfile(img_array_8bit,
                                                    save_path,
                                                    img_path,
                                                    nodata=nodata,
                                                    compress='lzw',
                                                    tiled='yes',
                                                    bigtiff='if_safer')
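
# Hedged sketch of a percentile stretch to 8-bit (an assumption about what
# raster_io.image_numpy_allBands_to_8bit_hist does, not its actual code).
import numpy as np

def stretch_band_to_8bit_sketch(band, per_min=0.01, per_max=0.99, nodata=None):
    valid = band[band != nodata] if nodata is not None else band.ravel()
    low, high = np.quantile(valid, [per_min, per_max])
    scaled = (band.astype(np.float64) - low) / max(high - low, 1e-6) * 254.0 + 1.0
    return np.clip(scaled, 1, 255).astype(np.uint8)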
Example #30
def main(options, args):

    shape_file = args[0]
    if io_function.is_file_exist(shape_file) is False:
        return False

    # draw_image_histogram_oneband("/Users/huanglingcao/Data/eboling/DEM/20160728-Large-DSM-NaN_slope.tif","slope_hist.jpg")
    # draw_image_histogram_oneband("/Users/huanglingcao/Data/eboling/DEM/20160728-Large-DSM-NaN.tif","dem_hist.jpg")

    # draw_dem_slope_hist("/Users/huanglingcao/Data/eboling/DEM/20160728-Large-DSM-NaN.tif",
    #                     "/Users/huanglingcao/Data/eboling/DEM/20160728-Large-DSM-NaN_slope.tif",
    #                     "dem_slope_histogram.jpg")

    # draw_two_attribute_scatter(shape_file, "INarea", "IoU", "IoU_InArea_scatter.jpg",color='k')
    # draw_one_attribute_histogram(shape_file, "IoU", "IoU (0-1)", "IoU.jpg")  # ,hatch='-'

    draw_one_attribute_histogram(shape_file, "INarea", "Area ($m^2$)", "area.jpg")   #,hatch='-'
    # draw_one_attribute_histogram(shape_file, "INperimete", "Perimeter (m)", "Perimeter.jpg")  #,hatch='\\'
    # draw_one_attribute_histogram(shape_file, "ratio_w_h", "ratio of HEIGHT over WIDTH (W>H)", "ratio_w_h.jpg")
    # draw_one_attribute_histogram(shape_file, "ratio_p_a", "ratio of $perimeter^2$ over area", "ratio_p_a.jpg")
    # draw_one_attribute_histogram(shape_file, "circularit", "Circularity", "Circularity.jpg")  # ,hatch='.'
    # #
    # # # topography
    # draw_one_attribute_histogram(shape_file, "dem_std", "standard variance of DEM", "dem_std.jpg")
    # draw_one_attribute_histogram(shape_file, "dem_max", "maximum value of DEM (meter)", "dem_max.jpg")
    draw_one_attribute_histogram(shape_file, "dem_mean", "Mean Elevation (m)", "dem_mean.jpg")  # ,hatch='x'
    # draw_one_attribute_histogram(shape_file, "dem_min", "minimum value of DEM (meter)", "dem_min.jpg")
    # #
    # draw_one_attribute_histogram(shape_file, "slo_std", "standard variance of Slope", "slo_std.jpg")
    # draw_one_attribute_histogram(shape_file, "slo_max", "maximum value of Slope ($^\circ$)", "slo_max.jpg")
    # draw_one_attribute_histogram(shape_file, "slo_mean", "Mean Slope ($^\circ$)", "slo_mean.jpg") #,hatch='/'
    # draw_one_attribute_histogram(shape_file, "slo_min", "minimum value of Slope ($^\circ$)", "slo_min.jpg")

    #
    # #hydrology
    # draw_one_attribute_histogram(shape_file, "F_acc_std", "standard variance of Flow accumulation", "F_acc_std.jpg")
    # draw_one_attribute_histogram(shape_file, "F_acc_max", "maximum value of Flow accumulation", "F_acc_max.jpg")
    # draw_one_attribute_histogram(shape_file, "F_acc_mean", "mean value of Flow accumulation", "F_acc_mean.jpg")
    # draw_one_attribute_histogram(shape_file, "F_acc_min", "minimum value of Flow accumulation", "F_acc_min.jpg")
    #

    # draw wind rose of aspect
    # draw_one_attribute_windrose(shape_file, "asp_std", "standard variance of Aspect", "asp_std.jpg")
    # draw_one_attribute_windrose(shape_file, "asp_max", "maximum value of Aspect ($^\circ$)", "asp_max.jpg")
    # draw_one_attribute_windrose(shape_file, "asp_mean", "Mean Aspect ($^\circ$)", "asp_mean.jpg") #,hatch='/'
    # draw_one_attribute_windrose(shape_file, "asp_min", "minimum value of Aspect ($^\circ$)", "asp_min.jpg")

    # draw wind rose of azimuth from manually draw lines
    # draw_one_attribute_windrose(shape_file, "aspectLine", "Mean Aspect ($^\circ$)", "aspectLine.jpg")  # ,hatch='/'




    os.system("mv processLog.txt bins.txt")

    pass