Example #1
def ortho_rectified_gdalwarp(input, output, dem_tif):
    # note: out_res, extent, and thread_num used below are assumed to be module-level settings

    #     gdalwarp -co compress=lzw -co tiled=yes -co bigtiff=if_safer -tr ${res} ${res} -t_srs ${prj} \
    #           -multi -wo NUM_THREADS=8  -r cubic -et 0.01 -to "RPC_DEM=${dem}" ${img}  ${out}
    CommandString = 'gdalwarp -co compress=lzw -co tiled=yes -co bigtiff=if_safer -tr ' + str(
        out_res) + ' ' + str(out_res)

    dem_prj = map_projection.get_raster_or_vector_srs_info_epsg(dem_tif)
    CommandString += ' -t_srs ' + dem_prj
    CommandString += ' -te ' + extent
    CommandString += ' -multi -wo NUM_THREADS=' + str(thread_num)
    CommandString += ' -r cubic -et 0.01 -rpc -to "RPC_DEM=%s" ' % dem_tif  # the input NTF image carries RPC metadata
    CommandString += ' -dstnodata 0 '
    CommandString += ' %s %s  ' % (input, output)

    return basic.exec_command_string_one_file(CommandString, output)
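
A minimal standalone sketch of the same gdalwarp RPC orthorectification, without the project helpers, using subprocess; the default resolution, target SRS, and thread count below are placeholder assumptions, and the -te extent option from the function above is omitted.

import subprocess

def ortho_rectify_with_gdalwarp(input_tif, output_tif, dem_tif,
                                out_res=2.0, t_srs='EPSG:3413', thread_num=8):
    # sketch: build the same gdalwarp command as above and run it directly;
    # out_res, t_srs, and thread_num defaults are placeholders, not project settings
    cmd = ['gdalwarp',
           '-co', 'compress=lzw', '-co', 'tiled=yes', '-co', 'bigtiff=if_safer',
           '-tr', str(out_res), str(out_res),
           '-t_srs', t_srs,
           '-multi', '-wo', 'NUM_THREADS=%d' % thread_num,
           '-r', 'cubic', '-et', '0.01',
           '-rpc', '-to', 'RPC_DEM=%s' % dem_tif,  # use the RPC metadata in the input image
           '-dstnodata', '0',
           input_tif, output_tif]
    subprocess.run(cmd, check=True)
    return output_tif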
Example #2
def polygons2geojson(input_shp, save_folder):
    '''
    convert polygons in a shapefile to multiple GeoJSON files (one per polygon)
    :param input_shp: path to the input shapefile
    :param save_folder: directory for saving the GeoJSON files
    :return:
    '''
    io_function.is_file_exist(input_shp)
    if os.path.isdir(save_folder) is False:
        io_function.mkdir(save_folder)

    polygons, ids = vector_gpd.read_polygons_attributes_list(input_shp, 'id')
    prj_info = map_projection.get_raster_or_vector_srs_info_epsg(
        input_shp)  # GeoJSON needs an EPSG code, such as "EPSG:3413"
    # print(prj_info)
    for poly, id in zip(polygons, ids):
        save_one_polygon_2geojson(poly, id, prj_info, save_folder)
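
save_one_polygon_2geojson is not shown in this example. A hypothetical sketch of such a helper, writing one single-feature GeoJSON file per polygon with shapely and json; the file naming and the crs member below are assumptions, not the project's confirmed behavior.

import os
import json
from shapely.geometry import mapping

def save_one_polygon_2geojson(polygon, poly_id, prj_info, save_folder):
    # hypothetical sketch: write a single-feature GeoJSON file for one polygon
    feature = {'type': 'Feature',
               'properties': {'id': poly_id},
               'geometry': mapping(polygon)}
    geojson = {'type': 'FeatureCollection',
               'crs': {'type': 'name', 'properties': {'name': prj_info}},  # e.g. "EPSG:3413"
               'features': [feature]}
    save_path = os.path.join(save_folder, 'polygon_%s.json' % str(poly_id))  # assumed naming
    with open(save_path, 'w') as f:
        json.dump(geojson, f)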
Example #3
def main(options, args):

    extent_shp = args[0]
    dem_index_shp = args[1]

    pre_name = os.path.splitext(os.path.basename(extent_shp))[0]
    pre_name += '_Tile' if 'Tile' in os.path.basename(
        dem_index_shp) else '_Strip'

    xlsx_size_path = os.path.splitext(
        os.path.basename(dem_index_shp))[0] + '_fileSize.xlsx'
    print('xlsx file for saving file sizes:', xlsx_size_path)

    # extent polygons and projection (proj4)
    extent_shp_prj = map_projection.get_raster_or_vector_srs_info_proj4(
        extent_shp)
    dem_shp_prj = map_projection.get_raster_or_vector_srs_info_proj4(
        dem_index_shp)

    if extent_shp_prj != dem_shp_prj:
        basic.outputlogMessage(
            '%s and %s do not have the same projection, will reproject %s' %
            (extent_shp, dem_index_shp, os.path.basename(extent_shp)))
        epsg = map_projection.get_raster_or_vector_srs_info_epsg(dem_index_shp)
        # print(epsg)
        # extent_polys = vector_gpd.read_shape_gpd_to_NewPrj(extent_shp,dem_shp_prj.strip())
        extent_polys = vector_gpd.read_shape_gpd_to_NewPrj(extent_shp, epsg)
    else:
        extent_polys = vector_gpd.read_polygons_gpd(extent_shp)

    # read 'grid_id' if the extent shp comes from a grid shp file; otherwise grid_id_list will be None
    grid_id_list = vector_gpd.read_attribute_values_list(extent_shp, 'grid_id')

    if len(extent_polys) < 1:
        raise ValueError('No polygons in %s' % extent_shp)
    else:
        basic.outputlogMessage('%d extent polygons in %s' %
                               (len(extent_polys), extent_shp))

    get_file_size_dem_tarball(dem_index_shp,
                              extent_polys,
                              pre_name,
                              xlsx_size_path,
                              poly_ids=grid_id_list)
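
The compare-projection-then-reproject pattern above recurs in most of the remaining examples. Assuming vector_gpd wraps geopandas (an assumption; the wrapper itself is not shown), a minimal standalone equivalent could be:

import geopandas as gpd

def read_polygons_in_prj(shp_path, target_epsg):
    # sketch: read a shapefile and reproject it only when its CRS differs from the target,
    # e.g. target_epsg = 'EPSG:3413'
    gdf = gpd.read_file(shp_path)
    if gdf.crs is not None and gdf.crs.to_string() != target_epsg:
        gdf = gdf.to_crs(target_epsg)
    return list(gdf.geometry)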
Example #4
# crop, mosaic, and reproject if necessary
if extent_shp_or_ids_txt.endswith('.shp'):
    pre_name = os.path.splitext(os.path.basename(extent_shp_or_ids_txt))[0]

    # extent polygons and projection (proj4)
    extent_shp_prj = map_projection.get_raster_or_vector_srs_info_proj4(
        extent_shp_or_ids_txt)
    grid_shp_prj = map_projection.get_raster_or_vector_srs_info_proj4(
        grid_20_shp)

    if extent_shp_prj != grid_shp_prj:
        basic.outputlogMessage(
            '%s and %s do not have the same projection, will reproject %s' %
            (extent_shp_or_ids_txt, grid_20_shp,
             os.path.basename(extent_shp_or_ids_txt)))
        epsg = map_projection.get_raster_or_vector_srs_info_epsg(grid_20_shp)
        extent_polys = vector_gpd.read_shape_gpd_to_NewPrj(
            extent_shp_or_ids_txt, epsg)
    else:
        extent_polys = vector_gpd.read_polygons_gpd(extent_shp_or_ids_txt)

    # grid_dem_tifs, extent_poly, o_res, and new_prj are assumed to be defined earlier in the script
    crop_mosaic_reproject_dem_diff(grid_dem_tifs,
                                   pre_name,
                                   extent_poly,
                                   o_res,
                                   new_prj,
                                   b_mosaic=False)


def main(options, args):
    pass
Example #5
def main(options, args):

    save_dir = options.save_dir
    extent_shp = options.extent_shp
    process_num = options.process_num
    o_res = options.out_res
    b_mosaic_id = options.create_mosaic_id
    b_mosaic_date = options.create_mosaic_date
    keep_dem_percent = options.keep_dem_percent

    dem_dir_or_txt = args[0]
    if os.path.isfile(dem_dir_or_txt):
        dem_list = io_function.read_list_from_txt(dem_dir_or_txt)
    else:
        dem_list = io_function.get_file_list_by_ext('.tif', dem_dir_or_txt, bsub_folder=False)
        dem_list = [ tif for tif in dem_list if 'matchtag' not in tif ] # remove matchtag
    dem_count = len(dem_list)
    if dem_count < 1:
        raise ValueError('No input dem files in %s' % dem_dir_or_txt)

    resample_method = 'average'


    if extent_shp is None:
        # group DEMs that share the same strip pair ID
        dem_groups = group_demTif_strip_pair_ID(dem_list)
        # mosaic them directly without considering the extent
        mosaic_dir = os.path.join(save_dir, 'dem_stripID_mosaic')
        mosaic_dem_same_stripID(dem_groups, mosaic_dir, resample_method, process_num=process_num, save_source=True,
                                o_format='GTiff')
    else:
        extent_shp_base = os.path.splitext(os.path.basename(extent_shp))[0]
        dem_prj = map_projection.get_raster_or_vector_srs_info_epsg(dem_list[0])
        extent_prj = map_projection.get_raster_or_vector_srs_info_epsg(extent_shp)

        # # check projection (time-consuming if there are many tif files)
        # for dem_tif in dem_list:
        #     prj = map_projection.get_raster_or_vector_srs_info_epsg(dem_tif)
        #     if dem_prj != prj:
        #         raise ValueError('The projection inconsistent among dems (%s is different)'%dem_tif)

        dem_ext_polys = get_dem_tif_ext_polygons(dem_list)

        if extent_prj==dem_prj:
            extent_polys = vector_gpd.read_polygons_gpd(extent_shp)
        else:
            extent_polys = vector_gpd.read_shape_gpd_to_NewPrj(extent_shp,dem_prj)

        if len(extent_polys) < 1:
            raise ValueError('No polygons in %s' % extent_shp)
        else:
            basic.outputlogMessage('%d extent polygons in %s' % (len(extent_polys), extent_shp))

        extPolys_ids = vector_gpd.read_attribute_values_list(extent_shp, 'id')
        if extPolys_ids is None or None in extPolys_ids:
            basic.outputlogMessage('Warning, field: id is not in %s, will create default ID for each grid' % extent_shp)
            extPolys_ids = [id + 1 for id in range(len(extent_polys))]

        for idx, ext_poly in zip(extPolys_ids, extent_polys):
            basic.outputlogMessage('mosaic and crop DEM for the %d th extent (%d in total)' % (idx, len(extent_polys)))
            # get subset of DEM
            dem_poly_ids = vector_gpd.get_poly_index_within_extent(dem_ext_polys, ext_poly)
            if len(dem_poly_ids) < 1:
                basic.outputlogMessage('no dem tifs within the %d th extent polygon' % idx)
                continue
            dem_list_sub = [dem_list[id] for id in dem_poly_ids]

            mosaic_crop_dem(dem_list_sub, save_dir, idx, ext_poly, b_mosaic_id, b_mosaic_date,
                                 process_num, keep_dem_percent, o_res, extent_shp_base, resample_method='average')
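
vector_gpd.get_poly_index_within_extent is used above to pick the DEM footprints that fall within one extent polygon. A hypothetical stand-in based on plain shapely intersection tests (not necessarily the project's actual implementation, which may use a spatial index):

from shapely.geometry import box

def poly_index_within_extent(polygons, extent_poly):
    # hypothetical stand-in: indices of footprint polygons that intersect the extent polygon
    return [idx for idx, poly in enumerate(polygons) if poly.intersects(extent_poly)]

# tiny check with toy boxes (placeholder geometries)
print(poly_index_within_extent([box(0, 0, 1, 1), box(5, 5, 6, 6)], box(0.5, 0.5, 2, 2)))  # [0]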
Example #6
def get_grid_20(extent_shp_or_id_txt, grid_polys, ids):
    '''
    get grid polygons and ids based on an input extent (polygon in a shapefile) or ids (txt file).
    if "file_name_base + '_grid_ids.txt'" exists, the ids will be read from that file directly.
    :param extent_shp_or_id_txt:
    :param grid_polys:
    :param ids:
    :return:
    '''

    io_function.is_file_exist(extent_shp_or_id_txt)

    if extent_shp_or_id_txt.endswith('.txt'):
        grid_ids = io_function.read_list_from_txt(extent_shp_or_id_txt)
        grid_ids = [int(item) for item in grid_ids ]
    else:
        shp_corresponding_grid_ids_txt = get_corresponding_grid_ids_txt(extent_shp_or_id_txt)
        if os.path.isfile(shp_corresponding_grid_ids_txt):
            print('corresponding grid ids txt file for %s exists, reading grid ids from it' % extent_shp_or_id_txt)
            grid_ids = [ int(item) for item in io_function.read_list_from_txt(shp_corresponding_grid_ids_txt)]
            basic.outputlogMessage('read %d grids within the extents (%s)'
                                   % (len(grid_ids), os.path.basename(extent_shp_or_id_txt)))
        else:
            # extent polygons and projection (proj4)
            extent_shp_prj = map_projection.get_raster_or_vector_srs_info_proj4(extent_shp_or_id_txt)
            if extent_shp_prj == '':
                raise ValueError('get proj4 of %s failed'%extent_shp_or_id_txt)
            grid_shp_prj = map_projection.get_raster_or_vector_srs_info_proj4(grid_20_shp)
            if grid_shp_prj=='':
                raise ValueError('get proj4 of %s failed' % grid_20_shp)

            if extent_shp_prj != grid_shp_prj:
                basic.outputlogMessage('%s and %s do not have the same projection, will reproject %s'
                                       % (extent_shp_or_id_txt, grid_20_shp, os.path.basename(extent_shp_or_id_txt)))
                epsg = map_projection.get_raster_or_vector_srs_info_epsg(grid_20_shp)
                # print(epsg)
                # extent_polys = vector_gpd.read_shape_gpd_to_NewPrj(extent_shp,dem_shp_prj.strip())
                extent_polys = vector_gpd.read_shape_gpd_to_NewPrj(extent_shp_or_id_txt, epsg)
            else:
                extent_polys = vector_gpd.read_polygons_gpd(extent_shp_or_id_txt)

            ext_poly_count = len(extent_polys)
            if ext_poly_count < 1:
                raise ValueError('No polygons in %s'%extent_shp_or_id_txt)
            grid_index = []
            # if there are many polygons, this will take time.
            for idx,ext_poly in enumerate(extent_polys):
                print(timeTools.get_now_time_str(), 'get grids for extent idx', idx, 'total polygons:',ext_poly_count)
                index = vector_gpd.get_poly_index_within_extent(grid_polys, ext_poly)
                grid_index.extend(index)
            grid_index = list(set(grid_index))  # remove duplicated ids
            basic.outputlogMessage('find %d grids within the extents (%s)' % (len(grid_index), os.path.basename(extent_shp_or_id_txt)) )

            grid_ids = [ ids[idx] for idx in grid_index]
            grid_ids_str = [str(item) for item in grid_ids ]
            io_function.save_list_to_txt(shp_corresponding_grid_ids_txt,grid_ids_str)

    id_index = [ids.index(id) for id in grid_ids]
    selected_grid_polys = [grid_polys[idx] for idx in id_index ]

    return selected_grid_polys, grid_ids
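
get_corresponding_grid_ids_txt is not shown, but the docstring above says the cached id list is named file_name_base + '_grid_ids.txt'. A sketch consistent with that naming (placing the file next to the shapefile is an assumption):

import os

def get_corresponding_grid_ids_txt(extent_shp):
    # sketch based on the docstring: cache file named <file_name_base>_grid_ids.txt,
    # assumed to sit in the same directory as the shapefile
    file_name_base = os.path.splitext(os.path.basename(extent_shp))[0]
    return os.path.join(os.path.dirname(extent_shp), file_name_base + '_grid_ids.txt')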
Example #7
def main(options, args):

    save_dir = options.save_dir
    extent_shp = options.extent_shp
    process_num = options.process_num

    dem_dir_or_txt = args[0]
    if os.path.isfile(dem_dir_or_txt):
        dem_list = io_function.read_list_from_txt(dem_dir_or_txt)
    else:
        dem_list = io_function.get_file_list_by_ext('.tif',
                                                    dem_dir_or_txt,
                                                    bsub_folder=False)
    dem_count = len(dem_list)
    if dem_count < 1:
        raise ValueError('No input dem files in %s' % dem_dir_or_txt)

    if extent_shp is not None:
        pre_name = os.path.splitext(os.path.basename(extent_shp))[0]
    else:
        pre_name = os.path.basename(os.path.abspath(save_dir))
    save_dem_diff = os.path.join(save_dir, pre_name + '_DEM_diff.tif')
    save_date_diff = os.path.join(save_dir, pre_name + '_date_diff.tif')
    if os.path.isfile(save_dem_diff) and os.path.isfile(save_date_diff):
        print('%s and %s exist, skip' % (save_dem_diff, save_date_diff))
        return

    if extent_shp is not None:
        # crop the DEM before differencing
        extent_shp_base = os.path.splitext(os.path.basename(extent_shp))[0]
        dem_prj = map_projection.get_raster_or_vector_srs_info_epsg(
            dem_list[0])
        extent_prj = map_projection.get_raster_or_vector_srs_info_epsg(
            extent_shp)
        if dem_prj != extent_prj:
            raise ValueError(
                'The projection of extent file (%s) and dem tifs is different'
                % extent_shp)

        extent_polys = vector_gpd.read_polygons_gpd(extent_shp)
        if len(extent_polys) != 1:
            raise ValueError('Only allow one polygon in %s' % extent_shp)

        extPolys_ids = vector_gpd.read_attribute_values_list(extent_shp, 'id')
        if extPolys_ids is None or None in extPolys_ids:
            basic.outputlogMessage(
                'Warning, field: id is not in %s, will create default ID for each grid'
                % extent_shp)
            extPolys_ids = [id + 1 for id in range(len(extent_polys))]

        # crop
        for idx, ext_poly in zip(extPolys_ids, extent_polys):
            basic.outputlogMessage(
                'crop and difference DEM for the %d th extent (%d in total)' %
                (idx, len(extent_polys)))
            crop_dem_list = crop_to_same_exent_for_diff(
                dem_list, save_dir, idx, ext_poly, process_num)

            dem_list = crop_dem_list

    dem_diff_newest_oldest(dem_list,
                           save_dem_diff,
                           save_date_diff,
                           process_num,
                           b_max_subsidence=options.max_subsidence)
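
dem_diff_newest_oldest is not shown here; judging by its name, it subtracts the oldest DEM from the newest. A heavily simplified stand-in that differences just two already-aligned DEMs with rasterio (an illustration only, not the project's multi-DEM implementation):

import rasterio
import numpy as np

def simple_dem_diff(newest_dem, oldest_dem, save_path):
    # simplified sketch: difference two DEMs already cropped to the same extent and grid
    # (as the cropping step above ensures), writing newest minus oldest
    with rasterio.open(newest_dem) as src_new, rasterio.open(oldest_dem) as src_old:
        diff = src_new.read(1).astype(np.float32) - src_old.read(1).astype(np.float32)
        profile = src_new.profile
    profile.update(dtype=rasterio.float32, count=1)
    with rasterio.open(save_path, 'w', **profile) as dst:
        dst.write(diff, 1)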
Example #8
def main(options, args):

    extent_shp = args[0]
    dem_index_shp = args[1]
    b_arcticDEM_tile = False

    global max_task_count
    max_task_count = options.max_process_num

    if 'Tile' in os.path.basename(dem_index_shp):
        save_folder = arcticDEM_tile_tarball_dir
        reg_tif_dir = arcticDEM_tile_reg_tif_dir
        b_arcticDEM_tile = True
    else:
        save_folder = tarball_dir
        reg_tif_dir = arcticDEM_reg_tif_dir

    # use the user-specified save_dir for saving downloaded tarballs
    if options.save_dir is not None:
        save_folder = options.save_dir
    if os.path.isdir(save_folder) is False:
        io_function.mkdir(save_folder)
    save_folder = os.path.abspath(save_folder)  # change to absolute path

    pre_name = os.path.splitext(os.path.basename(extent_shp))[0]
    pre_name += '_Tile' if 'Tile' in os.path.basename(
        dem_index_shp) else '_Strip'

    # extent polygons and projection (proj4)
    extent_shp_prj = map_projection.get_raster_or_vector_srs_info_proj4(
        extent_shp)
    dem_shp_prj = map_projection.get_raster_or_vector_srs_info_proj4(
        dem_index_shp)

    if extent_shp_prj != dem_shp_prj:
        basic.outputlogMessage(
            '%s and %s do not have the same projection, will reproject %s' %
            (extent_shp, dem_index_shp, os.path.basename(extent_shp)))
        epsg = map_projection.get_raster_or_vector_srs_info_epsg(dem_index_shp)
        # print(epsg)
        # extent_polys = vector_gpd.read_shape_gpd_to_NewPrj(extent_shp,dem_shp_prj.strip())
        extent_polys = vector_gpd.read_shape_gpd_to_NewPrj(extent_shp, epsg)
    else:
        extent_polys = vector_gpd.read_polygons_gpd(extent_shp)

    # read 'grid_id' if the extent shp comes from a grid shp file; otherwise grid_id_list will be None
    grid_id_list = vector_gpd.read_attribute_values_list(extent_shp, 'grid_id')

    if len(extent_polys) < 1:
        raise ValueError('No polygons in %s' % extent_shp)
    else:
        basic.outputlogMessage(
            'read %d extent polygons in %s for downloading and processing' %
            (len(extent_polys), extent_shp))

    download_dem_tarball(dem_index_shp,
                         extent_polys,
                         save_folder,
                         pre_name,
                         reg_tif_dir=reg_tif_dir,
                         poly_ids=grid_id_list,
                         b_arcticDEM_tile=b_arcticDEM_tile)
Example #9
def main(options, args):

    extent_shp = args[0]
    # ext_shp_prj = map_projection.get_raster_or_vector_srs_info_epsg(extent_shp)
    # reproject if necessary; it seems that gdalwarp can handle different projections
    # if ext_shp_prj != 'EPSG:3413':  # EPSG:3413 is the projection ArcticDEM uses
    #     extent_shp_reprj = io_function.get_name_by_adding_tail(extent_shp,'3413')
    #     vector_gpd.reproject_shapefile(extent_shp,'EPSG:3413',extent_shp_reprj)
    #     extent_shp = extent_shp_reprj

    tar_dir = options.ArcticDEM_dir
    save_dir = options.save_dir
    b_mosaic_id = options.create_mosaic_id
    b_mosaic_date = options.create_mosaic_date
    b_rm_inter = options.remove_inter_data
    keep_dem_percent = options.keep_dem_percent
    inter_format = options.format
    arcticDEM_shp = options.arcticDEM_shp
    o_res = options.out_res
    b_dem_diff = options.create_dem_diff
    dem_list_txt = options.dem_list_txt

    # creating the mosaic is time-consuming and also takes a lot of memory; for a region of 50 km by 50 km, it may take 10 to 50 GB of memory
    process_num = options.process_num
    basic.outputlogMessage(
        'The number of processes for creating the mosaic is: %d' % process_num)

    extent_shp_base = os.path.splitext(os.path.basename(extent_shp))[0]
    extent_prj = map_projection.get_raster_or_vector_srs_info_epsg(extent_shp)

    b_ArcticDEM_tar = False
    dem_tif_list = []
    if tar_dir is not None and arcticDEM_shp is not None:
        b_ArcticDEM_tar = True
    else:
        dem_tif_list = io_function.read_list_from_txt(dem_list_txt)
        # check projection
        for dem_tif in dem_tif_list:
            dem_prj = map_projection.get_raster_or_vector_srs_info_epsg(
                dem_tif)
            if dem_prj != extent_prj:
                raise ValueError('The projection of %s is different from %s' %
                                 (dem_prj, extent_prj))

    b_ArcticDEM_tiles = False
    if b_ArcticDEM_tar:
        arcdem_prj = map_projection.get_raster_or_vector_srs_info_epsg(
            arcticDEM_shp)

        # read dem polygons and url
        time0 = time.time()
        dem_polygons, dem_urls = vector_gpd.read_polygons_attributes_list(
            arcticDEM_shp, 'fileurl', b_fix_invalid_polygon=False)
        print('time cost of reading polygons and attributes:', time.time() - time0)
        basic.outputlogMessage('%d dem polygons in %s' %
                               (len(dem_polygons), arcticDEM_shp))
        # get tarball list
        tar_list = io_function.get_file_list_by_ext('.gz',
                                                    tar_dir,
                                                    bsub_folder=False)
        if len(tar_list) < 1:
            raise ValueError('No input tar.gz files in %s' % tar_dir)

        if is_ArcticDEM_tiles(tar_list):
            basic.outputlogMessage('Input is the mosaic version of ArcticDEM')
            b_ArcticDEM_tiles = True

        if extent_prj == arcdem_prj:
            extent_polys = vector_gpd.read_polygons_gpd(extent_shp)
        else:
            extent_polys = vector_gpd.read_shape_gpd_to_NewPrj(
                extent_shp, arcdem_prj)
    else:
        extent_polys = vector_gpd.read_polygons_gpd(extent_shp)

    if len(extent_polys) < 1:
        raise ValueError('No polygons in %s' % extent_shp)
    else:
        basic.outputlogMessage('%d extent polygons in %s' %
                               (len(extent_polys), extent_shp))

    extPolys_ids = vector_gpd.read_attribute_values_list(extent_shp, 'id')
    if extPolys_ids is None or None in extPolys_ids:
        basic.outputlogMessage(
            'Warning, field: id is not in %s, will create default ID for each grid'
            % extent_shp)
        extPolys_ids = [id + 1 for id in range(len(extent_polys))]

    # print('extPolys_ids, count',extPolys_ids, len(extent_polys))

    same_extent = False
    if b_dem_diff:
        # crop each one to the same extent, which makes DEM differencing easier
        same_extent = True

    for idx, ext_poly in zip(extPolys_ids, extent_polys):
        basic.outputlogMessage('get data for the %d th extent (%d in total)' %
                               (idx, len(extent_polys)))

        if b_ArcticDEM_tar:
            if b_ArcticDEM_tiles:
                proc_ArcticDEM_tile_one_grid_polygon(tar_dir, dem_polygons,
                                                     dem_urls, o_res, save_dir,
                                                     inter_format, b_rm_inter,
                                                     ext_poly, idx,
                                                     extent_shp_base)
            else:
                proc_ArcticDEM_strip_one_grid_polygon(
                    tar_dir,
                    dem_polygons,
                    dem_urls,
                    o_res,
                    save_dir,
                    inter_format,
                    b_mosaic_id,
                    b_mosaic_date,
                    b_rm_inter,
                    b_dem_diff,
                    ext_poly,
                    idx,
                    keep_dem_percent,
                    process_num,
                    extent_shp_base,
                    resample_method='average',
                    same_extent=same_extent)
        else:
            proc_dem_mosaic_diff(dem_tif_list,
                                 save_dir,
                                 idx,
                                 ext_poly,
                                 b_mosaic_id,
                                 b_mosaic_date,
                                 process_num,
                                 keep_dem_percent,
                                 o_res,
                                 b_dem_diff,
                                 extent_shp_base,
                                 b_rm_inter,
                                 resample_method='average')
Example #10
def main():
    dem_index_shp = os.path.expanduser(
        '~/Data/Arctic/ArcticDEM/BROWSE_SERVER/indexes/ArcticDEM_Tile_Index_Rel7/ArcticDEM_Tile_Index_Rel7.shp'
    )
    # extent_shp = os.path.expanduser('~/Data/PDO/PDO_statistics_swatchs/swatch_bounding_boxes.shp')
    extent_shp = os.path.expanduser(
        '~/Data/PDO/extent_each_swatch/merge_all_qa_exent.shp')

    # extent polygons and projection (proj4)
    extent_shp_prj = map_projection.get_raster_or_vector_srs_info_proj4(
        extent_shp)
    dem_shp_prj = map_projection.get_raster_or_vector_srs_info_proj4(
        dem_index_shp)

    if extent_shp_prj != dem_shp_prj:
        basic.outputlogMessage(
            '%s and %s do not have the same projection, will reproject %s' %
            (extent_shp, dem_index_shp, os.path.basename(extent_shp)))
        epsg = map_projection.get_raster_or_vector_srs_info_epsg(dem_index_shp)
        # print(epsg)
        # extent_polys = vector_gpd.read_shape_gpd_to_NewPrj(extent_shp,dem_shp_prj.strip())
        extent_polys = vector_gpd.read_shape_gpd_to_NewPrj(extent_shp, epsg)
    else:
        extent_polys = vector_gpd.read_polygons_gpd(extent_shp)

    poly_ids = [idx for idx in range(len(extent_polys))]
    if 'boxes' in os.path.basename(extent_shp):
        nc_file_names = vector_gpd.read_attribute_values_list(
            extent_shp, 'nc_file')
    else:
        nc_file_names = vector_gpd.read_attribute_values_list(
            extent_shp, 'layer')

    # read dem polygons and tile number
    dem_polygons, dem_tiles = vector_gpd.read_polygons_attributes_list(
        dem_index_shp, 'tile', b_fix_invalid_polygon=False)

    for count, (idx, ext_poly) in enumerate(zip(poly_ids, extent_polys)):
        basic.outputlogMessage('get data for the %d th extent (%d/%d)' %
                               (idx, count, len(extent_polys)))

        save_txt_path = nc_file_names[idx] + '-' + 'dem_tiles_poly_%d.txt' % idx
        if os.path.isfile(save_txt_path):
            tiles = io_function.read_list_from_txt(save_txt_path)
            basic.outputlogMessage('read %d dem tiles from %s' %
                                   (len(tiles), save_txt_path))
        else:
            # get the tile names of DEMs within the extent polygon
            dem_poly_ids = vector_gpd.get_poly_index_within_extent(
                dem_polygons, ext_poly)
            basic.outputlogMessage('found %d DEM tiles within the %d th extent' %
                                   (len(dem_poly_ids), idx))
            tiles = [dem_tiles[id] for id in dem_poly_ids]

            # save to txt
            io_function.save_list_to_txt(save_txt_path, tiles)
            basic.outputlogMessage('saved the dem tile list to %s' % save_txt_path)

        # download the tarballs covering this polygon
        url_head = 'https://data.pgc.umn.edu/elev/dem/setsm/ArcticDEM/mosaic/v3.0/32m/'
        download_tarball_for_one_polygon(tarball_dir, dem_tif_dir, url_head,
                                         tiles)

        # create a mosaic
        create_a_mosaic(nc_file_names[idx], idx, dem_eachSwatch_dir, ext_poly,
                        tiles)

    bak_folder = 'small_tifs'
    io_function.mkdir(bak_folder)
    # remove small and duplicated ones
    for file_name in nc_file_names:
        crop_tifs = io_function.get_file_list_by_pattern(
            dem_eachSwatch_dir, file_name + '*crop.tif')
        if len(crop_tifs) == 1:
            pass
        elif len(crop_tifs) > 1:
            # keep the largest one and move the smaller ones to the backup folder
            tif_files_size = [
                io_function.get_file_size_bytes(item) for item in crop_tifs
            ]
            max_size = max(tif_files_size)
            max_index = tif_files_size.index(max_size)
            del crop_tifs[max_index]
            for tmp in crop_tifs:
                io_function.movefiletodir(tmp, bak_folder)
                tmp = tmp.replace('_crop', '')
                io_function.movefiletodir(tmp, bak_folder)

        else:  # no tif
            raise ValueError('Results for %s do not exist' % file_name)