Example #1
def main():

    hillshade_dir = os.path.join(work_dir, 'hillshade_sub_images')
    dem_slope_8bit_dir = os.path.join(work_dir, 'dem_slope_8bit_sub_images')
    dem_relative_8bit_dir = os.path.join(work_dir, 'dem_relative_8bit_sub_images')
    other_dirs = [dem_slope_8bit_dir, dem_relative_8bit_dir]
    other_dirs_tifs = [io_function.get_file_list_by_ext('.tif', o_dir, bsub_folder=True) for o_dir in other_dirs]


    json_list = io_function.get_file_list_by_ext('.json', hillshade_dir, bsub_folder=True)
    json_base_list = [os.path.basename(item) for item in json_list]

    for json_path, base_name in zip(json_list, json_base_list):
        date_str, poly_num = get_date_str_poly_num(base_name)

        for tif_list in other_dirs_tifs:

            for tif in tif_list:
                name_noext = io_function.get_name_no_ext(tif)
                if date_str in name_noext and poly_num in name_noext:
                    # modify and save the json file
                    dst_path = os.path.join(os.path.dirname(tif), name_noext+'.json')
                    # io_function.copy_file_to_dst(json_path,dst_path)
                    data_dict = io_function.read_dict_from_txt_json(json_path)
                    data_dict['imagePath'] = os.path.basename(tif)
                    data_dict['imageData'] = None
                    io_function.save_dict_to_txt_json(dst_path, data_dict)
                    print('saving %s' % dst_path)

                    break

        pass
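
The matching above relies on get_date_str_poly_num, which is defined elsewhere in the source; a minimal sketch of such a helper, assuming the file names embed an 8-digit date and a 'poly_<n>' id (both assumptions, not confirmed by the source):

import re

def get_date_str_poly_num(base_name):
    # hypothetical helper: extract '20200701' and 'poly_12' from a name
    # like 'hillshade_20200701_poly_12.json'
    date_str = re.findall(r'\d{8}', base_name)[0]
    poly_num = re.findall(r'poly_\d+', base_name)[0]
    return date_str, poly_num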
def read_precipitation_series(data_folder, station_no):

    txt_list = io_function.get_file_list_by_ext('.TXT',
                                                data_folder,
                                                bsub_folder=False)
    txt_list_2 = io_function.get_file_list_by_ext('.txt',
                                                  data_folder,
                                                  bsub_folder=False)
    txt_list.extend(txt_list_2)

    # read daily mean, max, and min precipitation
    date_list, mean_pre, max_pre, min_pre = read_data_series(
        station_no, txt_list)

    # convert to mm
    mean_pre = [pre / 10.0 for pre in mean_pre]
    max_pre = [pre / 10.0 for pre in max_pre]
    min_pre = [pre / 10.0 for pre in min_pre]

    date_list_new = []
    mean_pre_new = []
    max_pre_new = []
    min_pre_new = []
    # remove outliers
    for date, pre1, pre2, pre3 in zip(date_list, mean_pre, max_pre, min_pre):
        if pre1 > 1000 or pre2 > 1000 or pre3 > 1000:
            continue
        date_list_new.append(date)
        mean_pre_new.append(pre1)
        max_pre_new.append(pre2)
        min_pre_new.append(pre3)

    return date_list_new, mean_pre_new, max_pre_new, min_pre_new
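
A minimal usage sketch (the folder name and station number are assumptions for illustration, not values from the source):

import matplotlib.pyplot as plt

dates, mean_pre, max_pre, min_pre = read_precipitation_series(
    'weather_station_data', '52908')
plt.plot(dates, mean_pre, label='daily mean precipitation (mm)')
plt.legend()
plt.savefig('precipitation_series.jpg', dpi=200)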
Example #3
def main():
    ntf_list = io_function.get_file_list_by_ext('.ntf',
                                                os.path.join(dir, 'DATA'),
                                                bsub_folder=True)
    io_function.save_list_to_txt('ntf_list.txt', ntf_list)
    dem_list = io_function.get_file_list_by_ext('.tif',
                                                os.path.join(dir, 'PRODUCTS'),
                                                bsub_folder=True)
    dem_list = [
        item for item in dem_list
        if item.endswith('_dem.tif') and 'strips' in item
    ]
    io_function.save_list_to_txt('dem_list.txt', dem_list)

    for idx, ntf in enumerate(ntf_list):
        print(' (%d/%d) working on ' % (idx + 1, len(ntf_list)), ntf)
        name = os.path.basename(ntf)
        scene_id = name.split('_')[2]
        print('scene_id:', scene_id)

        dem_path = None
        for dem_tif in dem_list:
            if scene_id in os.path.basename(dem_tif):
                dem_path = dem_tif
                break
        if dem_path is None:
            raise ValueError('Cannot find the corresponding DEM')

        output = os.path.splitext(name)[0] + '_ortho_sub.tif'
        ortho_rectified_gdalwarp(ntf, output, dem_path)
        # break

    pass
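
ortho_rectified_gdalwarp is defined elsewhere in the source; a sketch of what it might do, assuming the NTF scenes carry RPC metadata that gdalwarp can combine with the DEM (an assumption, not the source's implementation):

import os

def ortho_rectified_gdalwarp(ntf, output, dem_path):
    # hypothetical sketch: orthorectify the scene using its RPC model and a DEM
    cmd = 'gdalwarp -rpc -to RPC_DEM=%s -r cubic %s %s' % (dem_path, ntf, output)
    print(cmd)
    if os.system(cmd) != 0:
        raise RuntimeError('gdalwarp failed for %s' % ntf)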
Example #4
def split_an_image(para_file, image_path, save_dir, patch_w, patch_h,
                   overlay_x, overlay_y):

    split_format = parameters.get_string_parameters(para_file,
                                                    'split_image_format')
    out_format = 'PNG'  # default is PNG
    if split_format == '.tif': out_format = 'GTIFF'
    if split_format == '.jpg': out_format = 'JPEG'
    if os.path.isdir(save_dir) is False:
        io_function.mkdir(save_dir)

    split_image.split_image(image_path,
                            save_dir,
                            patch_w,
                            patch_h,
                            overlay_x,
                            overlay_y,
                            out_format,
                            pre_name=None,
                            process_num=8)
    # get list
    patch_list = io_function.get_file_list_by_ext(split_format,
                                                  save_dir,
                                                  bsub_folder=False)
    if len(patch_list) < 1:
        print('Warning, no images in %s' % save_dir)
        return None
    list_txt_path = save_dir + '_list.txt'
    io_function.save_list_to_txt(list_txt_path, patch_list)
    return list_txt_path
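
A minimal usage sketch (the para file name, image path, and patch sizes are assumptions):

list_txt = split_an_image('main_para.ini', 'scene.tif', 'scene_patches',
                          480, 480, 160, 160)
if list_txt is not None:
    print('patch list saved to', list_txt)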
Example #5
def main(options, args):

    save_dir = options.save_dir
    dem_dir_or_txt = args[0]
    ref_dem = options.ref_dem
    dem_valid_per_txt = options.dem_valid_per_txt
    process_num = options.process_num

    if os.path.isfile(dem_dir_or_txt):
        dem_list = io_function.read_list_from_txt(dem_dir_or_txt)
    else:
        dem_list = io_function.get_file_list_by_ext('.tif',
                                                    dem_dir_or_txt,
                                                    bsub_folder=False)
        if dem_valid_per_txt is None:
            dem_valid_per_txt = os.path.join(dem_dir_or_txt,
                                             'dem_valid_percent.txt')
    dem_count = len(dem_list)
    if dem_count < 1:
        raise ValueError('No input dem files in %s' % dem_dir_or_txt)

    if ref_dem is None:
        ref_dem = choose_reference_dem(dem_list, dem_valid_per_txt)
        if ref_dem is None:
            raise ValueError('Cannot find a reference DEM')

    if ref_dem in dem_list:
        dem_list.remove(ref_dem)
    # co_registration_parallel(ref_dem,dem_list,save_dir,process_num)
    co_registration_multi_process(ref_dem, dem_list, save_dir, process_num)
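
choose_reference_dem is not shown here; a sketch of one plausible implementation, assuming each line of the valid-percent txt is "<dem_path> <percent>" (an assumption about the file format):

def choose_reference_dem(dem_list, dem_valid_per_txt):
    # hypothetical sketch: pick the DEM with the highest valid-pixel percentage
    best_dem, best_per = None, -1.0
    with open(dem_valid_per_txt) as f_obj:
        for line in f_obj:
            path, per = line.split()[:2]
            if path in dem_list and float(per) > best_per:
                best_dem, best_per = path, float(per)
    return best_dem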
Example #6
def read_down_load_geometry(folder):
    '''
    read geojson files in a folder; each geojson file stores the geometry
    of a scene, which is appended to a global variable
    :param folder: the save folder
    :return:
    '''
    global downloaded_scene_geometry
    json_list = io_function.get_file_list_by_ext('.geojson',
                                                 folder,
                                                 bsub_folder=False)
    for json_file in json_list:

        # ignore the scenes in the excluded list
        item_id = os.path.splitext(os.path.basename(json_file))[0]
        if item_id in manually_excluded_scenes:
            continue

        scene_folder = os.path.splitext(json_file)[0]
        asset_files = io_function.get_file_list_by_pattern(scene_folder, '*')
        if len(asset_files) < 3:
            basic.outputlogMessage(
                'downloading of scene %s is not complete, ignore it' % item_id)
            continue

        with open(json_file) as f_obj:
            data = json.load(f_obj)
            downloaded_scene_geometry.append(data)
Example #7
def main(options, args):

    # folder containing images (downloaded from Google Earth Engine)
    img_folder = args[0]
    img_file_list = io_function.get_file_list_by_ext('.tif',
                                                     img_folder,
                                                     bsub_folder=False)

    # img_file_list = img_file_list[:2]   # for test

    # ndvi
    # batch_cal_msi(img_file_list, 'landsat8_ndvi.tif', cal_ndvi_landsat8)

    # ndwi
    # batch_cal_msi(img_file_list, 'landsat8_ndwi.tif', cal_ndwi_landsat8)

    # ndmi
    # batch_cal_msi(img_file_list, 'landsat8_ndmi.tif', cal_ndmi_landsat8)

    # brightness
    # batch_cal_msi(img_file_list, 'landsat8_brightness.tif', cal_brightness_landsat8)

    # greenness
    batch_cal_msi(img_file_list, 'landsat8_greenness.tif',
                  cal_greenness_landsat8)

    # wetness
    batch_cal_msi(img_file_list, 'landsat8_wetness.tif', cal_wetness_landsat8)

    pass
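
The cal_* functions compute per-pixel indices; a sketch of what cal_ndvi_landsat8 might look like, assuming batch_cal_msi passes an image path and expects an array back, and that Red is band 4 and NIR is band 5 in these Landsat 8 tifs (both assumptions):

import numpy as np
import rasterio

def cal_ndvi_landsat8(img_path):
    # hypothetical sketch: NDVI = (NIR - Red) / (NIR + Red)
    with rasterio.open(img_path) as src:
        red = src.read(4).astype('float32')
        nir = src.read(5).astype('float32')
    return (nir - red) / np.maximum(nir + red, 1e-6)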
Example #8
def main(options, args):
    in_folder = args[0]
    template_ini = args[1]

    image_paths = io_function.get_file_list_by_ext('.tif',
                                                   in_folder,
                                                   bsub_folder=True)
    if len(image_paths) < 1:
        raise IOError('no tif files in %s' % in_folder)

    # get unique dir list
    img_dir_list = [os.path.dirname(item) for item in image_paths]
    img_dir_list = set(img_dir_list)

    region_ini_files_list = []
    for img_dir in img_dir_list:
        # copy template file
        out_ini = create_new_region_defined_parafile(template_ini, img_dir,
                                                     options.area_remark)
        region_ini_files_list.append(out_ini)

    with open('region_ini_files.txt', 'a') as f_obj:
        for ini in region_ini_files_list:
            f_obj.write(ini + '\n')

    pass
def main(options, args):

    save_dir = options.save_dir
    b_rm_inter = options.remove_inter_data
    b_rm_tarball = options.remove_tarball

    tar_dir = args[0]
    if os.path.isfile(tar_dir):
        tar_list = [tar_dir]
    else:
        tar_list = io_function.get_file_list_by_ext('.gz',
                                                    tar_dir,
                                                    bsub_folder=False)
        tar_count = len(tar_list)
        if tar_count < 1:
            raise ValueError('No input tar.gz files in %s' % tar_dir)

    if is_ArcticDEM_tiles(tar_list):
        apply_registration = False
    else:
        apply_registration = True

    work_dir = './'
    b_rm_tarball = True  # note: this overrides the command-line option and always removes tarballs after unpacking
    process_dem_tarball(tar_list,
                        work_dir,
                        save_dir,
                        remove_inter_data=b_rm_inter,
                        rm_tarball=b_rm_tarball,
                        apply_registration=apply_registration)
Example #10
def main(options, args):
    input_path = args[0]   # renamed to avoid shadowing the built-in 'input'

    if input_path.endswith('.txt'):
        slope_tifs = io_function.read_list_from_txt(input_path)
    elif os.path.isdir(input_path):
        slope_tifs = io_function.get_file_list_by_ext('.tif', input_path, bsub_folder=True)
    else:
        slope_tifs = [input_path]
    process_num = options.process_num

    working_dir = './'
    save_dir = dem_headwall_shp_dir
    if os.path.isdir(working_dir) is False:
        io_function.mkdir(working_dir)
    if os.path.isdir(save_dir) is False:
        io_function.mkdir(save_dir)

    failed_tifs = []

    min_slope = options.min_slope
    min_size = options.min_area
    max_size = options.max_area
    max_axis_width = options.max_axis_width
    max_box_WH = options.max_box_WH
    for idx, slope in enumerate(slope_tifs):
        if extract_headwall_from_slope(idx, len(slope_tifs), slope, working_dir,
                                       save_dir, min_slope, min_size, max_size,
                                       max_axis_width, max_box_WH,
                                       process_num) is False:
            failed_tifs.append(slope)

    io_function.save_list_to_txt('extract_headwall_failed_tifs.txt', failed_tifs)
def run_extract_headwall_jobs(max_job_count, n_tif_per_jobs):

    from dem_common import dem_headwall_shp_dir, dem_slope_dir

    if os.path.isdir(dem_headwall_shp_dir) is False:
        io_function.mkdir(dem_headwall_shp_dir)

    # get slope file list
    slope_tifs = io_function.get_file_list_by_ext('.tif',
                                                  dem_slope_dir,
                                                  bsub_folder=False)
    print('Found %d tifs in %s' % (len(slope_tifs), dem_slope_dir))

    # divide the slope tifs into groups
    slope_tif_count = len(slope_tifs)
    slope_tif_groups = [
        slope_tifs[i:i + n_tif_per_jobs]
        for i in range(0, slope_tif_count, n_tif_per_jobs)
    ]

    for idx, slope_tifs_group in enumerate(slope_tif_groups):

        print(
            datetime.now(),
            'processing group %d of %d for extracting headwall' %
            (idx, len(slope_tif_groups)))
        submit_extract_headwall_job(slope_tifs_group, idx, max_job_count)
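
The grouping comprehension above simply chunks the list into fixed-size groups; for example:

tifs = ['a.tif', 'b.tif', 'c.tif', 'd.tif', 'e.tif']
groups = [tifs[i:i + 2] for i in range(0, len(tifs), 2)]
# groups == [['a.tif', 'b.tif'], ['c.tif', 'd.tif'], ['e.tif']]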
Example #12
def main(options, args):

    # folder containing images (downloaded from Google Earth Engine)
    # img_folder = args[0]
    img_folder = '/Users/huanglingcao/Data/Qinghai-Tibet/beiluhe/beiluhe_landsat/LT05_2010to2011'
    img_file_list = io_function.get_file_list_by_ext('.tif', img_folder, bsub_folder=False)

    # img_file_list = img_file_list[:2]   # for test
    satellite = 'landsat5'
    # ndvi
    batch_cal_msi(img_file_list, satellite + '_ndvi.tif', cal_ndvi_landsat5)

    # ndwi
    batch_cal_msi(img_file_list, satellite + '_ndwi.tif', cal_ndwi_landsat5)

    # ndmi
    batch_cal_msi(img_file_list, satellite + '_ndmi.tif', cal_ndmi_landsat5)

    # brightness
    batch_cal_msi(img_file_list, satellite + '_brightness.tif', cal_brightness_landsat5)

    # greenness
    batch_cal_msi(img_file_list, satellite + '_greenness.tif', cal_greenness_landsat5)

    # wetness
    batch_cal_msi(img_file_list, satellite + '_wetness.tif', cal_wetness_landsat5)

    pass
Example #13
def test_zonal_stats_multiRasters():

    shp = os.path.expanduser(
        '~/Data/Arctic/canada_arctic/Willow_River/Willow_River_Thaw_Slumps.shp'
    )
    # save_shp = os.path.basename(io_function.get_name_by_adding_tail(shp,'raster_stats'))

    # a single DEM
    # dem_file_dir = os.path.expanduser('~/Data/Arctic/canada_arctic/DEM/WR_dem_ArcticDEM_mosaic')
    # dem_path = os.path.join(dem_file_dir,'WR_extent_2m_v3.0_ArcticTileDEM_sub_1_prj.tif')

    # dem patches
    dem_file_dir = os.path.expanduser(
        '~/Data/Arctic/canada_arctic/DEM/WR_dem_ArcticDEM_mosaic/dem_patches')
    dem_list = io_function.get_file_list_by_ext('.tif',
                                                dem_file_dir,
                                                bsub_folder=False)
    save_shp = os.path.basename(
        io_function.get_name_by_adding_tail(shp, 'multi_raster_stats'))

    io_function.copy_shape_file(shp, save_shp)
    zonal_stats_multiRasters(save_shp,
                             dem_list,
                             nodata=None,
                             band=1,
                             stats=None,
                             prefix='dem',
                             range=None,
                             all_touched=True,
                             process_num=4)
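
zonal_stats_multiRasters is the repo's own helper; for a single raster, the third-party rasterstats package offers a similar interface (a sketch for comparison, not the source's implementation):

from rasterstats import zonal_stats

stats = zonal_stats(save_shp, dem_list[0],
                    stats=['min', 'max', 'mean', 'std'],
                    all_touched=True)
print(stats[0])   # one dict of statistics per polygon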
Example #14
def main(options, args):
    # data_ini_dir = os.path.expanduser('~/Data/Arctic/canada_arctic/autoMapping/WR_multiDate_inis')
    # training_root_dir = os.path.expanduser('~/Data/Arctic/canada_arctic/autoMapping/ray_results/tune_dataAug_para_tesia')
    # template_dir = os.path.expanduser('~/Data/Arctic/canada_arctic/autoMapping/eval_new_data')

    data_ini_dir = os.path.expanduser(
        '~/Data/Arctic/canada_arctic/autoMapping/area_multiDate_inis')
    training_root_dir = os.path.expanduser(
        '~/Data/Arctic/canada_arctic/autoMapping/multiArea_deeplabV3+_8')
    template_dir = os.path.expanduser(
        '~/Data/Arctic/canada_arctic/autoMapping/multiArea_deeplabV3+_8')

    if options.data_ini_dir_or_list is not None:
        data_ini_dir = options.data_ini_dir_or_list
    if options.training_root_dir is not None:
        training_root_dir = options.training_root_dir
    if options.template_dir is not None:
        template_dir = options.template_dir

    # get data list
    if os.path.isdir(data_ini_dir):
        area_ini_list = io_function.get_file_list_by_ext('.ini',
                                                         data_ini_dir,
                                                         bsub_folder=False)
    else:
        area_ini_list = io_function.read_list_from_txt(data_ini_dir)
        # change to absolute paths, since the working directory will change later
        area_ini_list = [os.path.abspath(item) for item in area_ini_list]

    for idx, area_ini in enumerate(area_ini_list):
        basic.outputlogMessage('%d/%d: run evaluation on dataset %d' %
                               (idx + 1, len(area_ini_list), idx))
        run_evaluation_one_dataset(idx, area_ini, training_root_dir,
                                   template_dir)
Example #15
def main(options, args):

    input_path = args[0]
    if os.path.isdir(input_path):
        if options.ext is None:
            basic.outputlogMessage(
                'an image file extension is needed, type help for more information'
            )
            return False

        file_list = io_function.get_file_list_by_ext(options.ext,
                                                     input_path,
                                                     bsub_folder=True)
        with open('images_list.txt', 'w') as f_obj:
            f_obj.writelines(["%s\n" % item for item in file_list])
    elif os.path.isfile(input_path):
        with open(input_path, 'r') as f_obj:
            file_list = f_obj.readlines()
    else:
        basic.outputlogMessage('input error: %s' % input_path)
        return False

    file_list = [item.strip() for item in file_list]

    return calculate_mean_of_images(file_list)
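
calculate_mean_of_images is defined elsewhere; a minimal sketch, assuming every image has the same band count and the goal is a per-band mean over all pixels (assumptions for illustration):

import numpy as np
import rasterio

def calculate_mean_of_images(file_list):
    # hypothetical sketch: accumulate per-band sums, then divide by pixel count
    band_sums, pixel_count = None, 0
    for path in file_list:
        with rasterio.open(path) as src:
            data = src.read().astype('float64')   # (bands, height, width)
        sums = data.sum(axis=(1, 2))
        band_sums = sums if band_sums is None else band_sums + sums
        pixel_count += data.shape[1] * data.shape[2]
    print('band means:', band_sums / pixel_count)
    return True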
Example #16
def main(options, args):
    org_dir = '/scratch/summit/lihu9680'
    bak_dir = '/home/lihu9680/scripts_para_bak'

    if options.org_dir is not None:
        org_dir = options.org_dir
    if options.bak_dir is not None:
        bak_dir = options.bak_dir

    print('org_dir:', org_dir)
    print('bak_dir:', bak_dir)

    # get bash file (*.sh) list
    print('find *.sh and *.ini files')
    sh_list = io_function.get_file_list_by_ext('.sh',
                                               org_dir,
                                               bsub_folder=True)
    ini_list = io_function.get_file_list_by_ext('.ini',
                                                org_dir,
                                                bsub_folder=True)
    sh_list.extend(ini_list)
    changed_list = []
    for sh in sh_list:
        new_path = bak_dir + sh.replace(org_dir, '')
        mo_time = os.path.getmtime(sh)
        if os.path.isfile(new_path):
            bak_mo_time = os.path.getmtime(new_path)
            if mo_time == bak_mo_time:
                continue

        new_dir = os.path.dirname(new_path)
        if os.path.isdir(new_dir) is False:
            os.system('mkdir -p %s' % new_dir)

        res = os.system('cp -p %s %s' % (sh, new_path))
        if res != 0:
            sys.exit(1)
        changed_list.append(sh)

    if len(changed_list) < 1:
        print("no new or modified files")
    else:
        print("backup files:")
        for sh in changed_list:
            print(sh)
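
The 'cp -p' call deliberately preserves the modification time so the timestamp comparison above keeps working; the same can be done without shelling out (a sketch):

import os
import shutil

def backup_file(src, dst):
    os.makedirs(os.path.dirname(dst), exist_ok=True)
    shutil.copy2(src, dst)   # copy2 preserves the modification time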
def read_training_pixels_from_multi_images(input, subImg_folder,
                                           subLabel_folder):
    """
    read pixels from subset images, which are extracted from Planet images based on training polygons
    :param subImg_folder: the folder containing images
    :param subLabel_folder: the folder containing labels
    :return: X, y arrays or None
    """
    img_list = io_function.get_file_list_by_ext('.tif',
                                                subImg_folder,
                                                bsub_folder=False)
    label_list = io_function.get_file_list_by_ext('.tif',
                                                  subLabel_folder,
                                                  bsub_folder=False)
    img_list.sort()
    label_list.sort()

    if len(img_list) < 1 or len(label_list) < 1:
        raise IOError('No tif images or labels in folder %s or %s' %
                      (subImg_folder, subLabel_folder))
    if len(img_list) != len(label_list):
        raise ValueError(
            'the number of images is not equal to the number of labels')

    # read them one by one
    Xs, ys = [], []
    for img, label in zip(img_list, label_list):
        X_aImg, y_a = read_training_pixels(img, label)
        Xs.append(X_aImg)
        ys.append(y_a)

    X_pixels = np.concatenate(Xs, axis=1)
    y_pixels = np.concatenate(ys, axis=0)
    X_pixels = np.transpose(X_pixels, (1, 0))
    basic.outputlogMessage(str(X_pixels.shape))
    basic.outputlogMessage(str(y_pixels.shape))

    return X_pixels, y_pixels
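
read_training_pixels is defined elsewhere; a sketch of a compatible implementation, assuming labels are single-band rasters where non-zero pixels belong to training polygons, and returning X as (bands, n_pixels) to match the axis=1 concatenation above (assumptions, not the source's code):

import numpy as np
import rasterio

def read_training_pixels(img_path, label_path):
    # hypothetical sketch: keep only the pixels with a non-zero label
    with rasterio.open(img_path) as src:
        img = src.read()        # (bands, height, width)
    with rasterio.open(label_path) as src:
        label = src.read(1)     # (height, width)
    mask = label > 0
    return img[:, mask], label[mask]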
Example #18
def main():

    # get data list
    area_ini_list = io_function.get_file_list_by_ext('.ini',
                                                     data_ini_dir,
                                                     bsub_folder=False)
    for idx, area_ini in enumerate(area_ini_list):
        basic.outputlogMessage('%d/%d: run evaluation on dataset %d' %
                               (idx + 1, len(area_ini_list), idx))
        run_evaluation_one_dataset(idx, area_ini)
Example #19
def main(options, args):
    extent_shp_or_ids_txt = args[0]
    process_num = options.process_num
    o_res = options.out_res

    if os.path.isdir(grid_matchtag_sum_dir) is False:
        io_function.mkdir(grid_matchtag_sum_dir)

    basic.setlogfile('produce_matchtag_sum_ArcticDEM_log_%s.txt' %
                     timeTools.get_now_time_str())

    # read grids and ids
    time0 = time.time()
    all_grid_polys, all_ids = vector_gpd.read_polygons_attributes_list(
        grid_20_shp, 'id')
    print('time cost of read polygons and attributes', time.time() - time0)

    # get grid ids based on input extent
    grid_base_name = os.path.splitext(
        os.path.basename(extent_shp_or_ids_txt))[0]
    grid_polys, grid_ids = get_grid_20(extent_shp_or_ids_txt, all_grid_polys,
                                       all_ids)

    # check the existence of matchtag sum files
    grid_dem_tifs, grid_ids_no_sum = get_existing_matchtag_sum(
        grid_matchtag_sum_dir, grid_base_name, grid_ids)
    if len(grid_ids_no_sum) > 0:
        # refine grid_polys
        if len(grid_ids) > len(grid_ids_no_sum):
            id_index = [grid_ids.index(id) for id in grid_ids_no_sum]
            grid_polys = [grid_polys[idx] for idx in id_index]

        # # download ArcticDEM and applying registration
        # tarballs, reg_tifs = download_dem_tarball(dem_strip_shp, grid_polys, arcticDEM_tarball_dir, grid_base_name,
        #                                         reg_tif_dir=arcticDEM_reg_tif_dir, poly_ids=grid_ids_no_demDiff)
        #
        # # unpack and applying registration
        # if len(tarballs) > 0:
        #     basic.outputlogMessage('Processs %d dem tarballs'%len(tarballs))
        #     out_reg_tifs = process_dem_tarball(tarballs,'./',arcticDEM_reg_tif_dir,remove_inter_data=True, apply_registration=True)
        #     basic.outputlogMessage('Get %d new registration dem tifs' % len(out_reg_tifs))
        #     reg_tifs.extend(out_reg_tifs)

        reg_tifs = io_function.get_file_list_by_ext('.tif',
                                                    arcticDEM_reg_tif_dir,
                                                    bsub_folder=False)
        matchtag_tifs = [tif for tif in reg_tifs
                         if 'matchtag' in tif]  # only keep matchtag
        # crop, sum
        out_dem_diffs = produce_matchtag_sum_grids(grid_polys,
                                                   grid_ids_no_sum,
                                                   grid_base_name,
                                                   matchtag_tifs,
                                                   o_res,
                                                   process_num=process_num)
Example #20
def create_new_region_defined_parafile(template_para_file,
                                       img_dir,
                                       area_remark=None):
    '''
    create a new region-defined para file; only the new images are defined
    (other parameters are not changed)
    :param template_para_file:
    :param img_dir:
    :param area_remark:
    :return:
    '''
    io_function.is_file_exist(template_para_file)

    dir_base = os.path.basename(img_dir)
    date_strs = re.findall(r'\d{8}', dir_base)
    if len(date_strs) == 1:
        date = date_strs[0]
    else:
        date = 'unknown'

    tail = date if area_remark is None else date + '_' + area_remark
    new_para_file = io_function.get_name_by_adding_tail(
        template_para_file, tail)
    new_para_file = os.path.basename(new_para_file)  # save to current folder
    if os.path.isfile(new_para_file):
        raise IOError('%s already exists, please check or remove first' %
                      new_para_file)

    # copy the file
    io_function.copy_file_to_dst(template_para_file, new_para_file)

    if area_remark is not None:
        modify_parameter(new_para_file, 'area_remark', area_remark)
    if date != 'unknown':
        modify_parameter(new_para_file, 'area_time', date)

    modify_parameter(new_para_file, 'input_image_dir', img_dir)
    modify_parameter(new_para_file, 'inf_image_dir', img_dir)

    tif_list = io_function.get_file_list_by_ext('.tif',
                                                img_dir,
                                                bsub_folder=False)
    if len(tif_list) < 1:
        raise ValueError('No tif in %s' % img_dir)
    if len(tif_list) == 1:
        modify_parameter(new_para_file, 'input_image_or_pattern',
                         os.path.basename(tif_list[0]))
        modify_parameter(new_para_file, 'inf_image_or_pattern',
                         os.path.basename(tif_list[0]))
    else:
        modify_parameter(new_para_file, 'input_image_or_pattern', '*.tif')
        modify_parameter(new_para_file, 'inf_image_or_pattern', '*.tif')

    print("modified and saved new parameter file: %s " % new_para_file)

    return new_para_file
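
modify_parameter comes from the repo's parameters module; a sketch of the idea, assuming the para file is a simple 'name = value' text format (an assumption about the format):

import re

def modify_parameter(para_file, name, new_value):
    # hypothetical sketch: rewrite the 'name = value' line in place
    with open(para_file) as f_obj:
        lines = f_obj.readlines()
    with open(para_file, 'w') as f_obj:
        for line in lines:
            if re.match(r'\s*%s\s*=' % name, line):
                line = '%s = %s\n' % (name, new_value)
            f_obj.write(line)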
Example #21
def read_grid_ids_from_other_extent():
    grid_txt_list = io_function.get_file_list_by_ext('.txt',
                                                     grid_ids_txt_dir,
                                                     bsub_folder=False)
    other_grid_ids = []
    for txt in grid_txt_list:
        id_list = io_function.read_list_from_txt(txt)
        other_grid_ids.extend(id_list)

    other_grid_ids = [int(item) for item in other_grid_ids]
    return other_grid_ids
def read_temperature_series(data_folder, station_no):

    txt_list = io_function.get_file_list_by_ext('.TXT',
                                                data_folder,
                                                bsub_folder=False)
    txt_list_2 = io_function.get_file_list_by_ext('.txt',
                                                  data_folder,
                                                  bsub_folder=False)
    txt_list.extend(txt_list_2)

    # read daily mean, max, and min temperature
    date_list, mean_tem, max_tem, min_tem = read_data_series(
        station_no, txt_list)

    # convert to degrees Celsius
    mean_tem = [tem / 10.0 for tem in mean_tem]
    max_tem = [tem / 10.0 for tem in max_tem]
    min_tem = [tem / 10.0 for tem in min_tem]

    return date_list, mean_tem, max_tem, min_tem
def test_remove_based_slope():

    dir = os.path.expanduser('~/Data/dem_processing/segment_parallel_9274')
    in_shp = os.path.join(dir, 'WR_extent_grid_ids_DEM_diff_grid9274_8bit.shp')
    rm_shapeinfo_shp = os.path.join(dir, 'WR_extent_grid_ids_DEM_diff_grid9274_8bit_rmshapeinfo.shp')
    process_num = 1

    # remove polygons based on slope,
    # using the slope derived from the ArcticDEM mosaic
    slope_tif_list = io_function.get_file_list_by_ext('.tif', dem_common.arcticDEM_tile_slope_dir, bsub_folder=False)
    basic.outputlogMessage('Find %d slope files in %s' % (len(slope_tif_list), dem_common.arcticDEM_tile_slope_dir))
    rm_slope_shp = io_function.get_name_by_adding_tail(in_shp, 'rmslope')
    max_slope = 20
    remove_based_slope(rm_shapeinfo_shp, rm_slope_shp, slope_tif_list, max_slope, process_num)
def test_get_dem_tif_ext_polygons():
    work_dir = os.path.expanduser(
        '~/Data/Arctic/canada_arctic/DEM/WR_dem_diff/dem_tifs')
    os.chdir(work_dir)

    tifs = io_function.get_file_list_by_ext('.tif',
                                            work_dir,
                                            bsub_folder=False)
    polygons = dem_mosaic_crop.get_dem_tif_ext_polygons(tifs)

    data = {'poly': polygons}
    pddata = pd.DataFrame(data)
    wkt_str = map_projection.get_raster_or_vector_srs_info_wkt(tifs[0])
    save_path = 'tif_extent.shp'
    vector_gpd.save_polygons_to_files(pddata, 'poly', wkt_str, save_path)
def main(options, args):
    extent_shp_or_ids_txt = args[0]
    process_num = options.process_num
    keep_dem_percent = options.keep_dem_percent
    o_res = options.out_res

    basic.setlogfile('produce_headwall_shp_ArcticDEM_log_%s.txt' %
                     timeTools.get_now_time_str())

    if os.path.isdir(grid_dem_headwall_shp_dir) is False:
        io_function.mkdir(grid_dem_headwall_shp_dir)

    # read grids and ids
    time0 = time.time()
    all_grid_polys, all_ids = vector_gpd.read_polygons_attributes_list(
        grid_20_shp, 'id')
    print('time cost of read polygons and attributes', time.time() - time0)

    # get grid ids based on input extent
    grid_base_name = os.path.splitext(
        os.path.basename(extent_shp_or_ids_txt))[0]
    grid_polys, grid_ids = get_grid_20(extent_shp_or_ids_txt, all_grid_polys,
                                       all_ids)

    # check the existence of headwall shapefiles
    grid_headwall_shps, grid_id_no_headwall_shp = get_existing_grid_headwall_shp(
        grid_dem_headwall_shp_dir, grid_base_name, grid_ids)
    if len(grid_id_no_headwall_shp) > 0:
        # refine grid_polys
        if len(grid_ids) > len(grid_id_no_headwall_shp):
            id_index = [grid_ids.index(id) for id in grid_id_no_headwall_shp]
            grid_polys = [grid_polys[idx] for idx in id_index]

        reg_tifs = io_function.get_file_list_by_ext('.tif',
                                                    arcticDEM_reg_tif_dir,
                                                    bsub_folder=False)
        reg_tifs = [tif for tif in reg_tifs
                    if 'matchtag' not in tif]  # remove matchtag
        #
        headwall_shp_folders = extract_headwall_grids(grid_polys,
                                                      grid_id_no_headwall_shp,
                                                      grid_base_name,
                                                      reg_tifs,
                                                      b_mosaic_id,
                                                      b_mosaic_date,
                                                      keep_dem_percent,
                                                      o_res,
                                                      process_num=process_num)
def main(options, args):
    # process_num = multiprocessing.cpu_count()
    process_num = options.process_num
    if len(args) > 0:
        data_folder = args[0]
    else:
        data_folder = grid_dem_headwall_shp_dir

    basic.outputlogMessage('check shapefiles in %s, with %d processes' % (data_folder, process_num))

    vector_files = io_function.get_file_list_by_ext('.shp', data_folder, bsub_folder=True)
    save_invalid_txt_path = os.path.basename(data_folder) + '_incomplete_list.txt'
    save_good_txt_path = os.path.basename(data_folder) + '_good_list.txt'
    file_count = len(vector_files)
    basic.outputlogMessage('The number of vector files: %d' % file_count)

    good_files = []
    if os.path.isfile(save_good_txt_path):
        good_files.extend(io_function.read_list_from_txt(save_good_txt_path))
    incomplete_files = []

    # remove files already known to be good from the list
    if len(good_files) > 0:
        vector_files = [item for item in vector_files if os.path.basename(item) not in good_files]

    if process_num == 1:
        for idx, shp in enumerate(vector_files):
            if check_one_vector_file(idx, file_count, shp, good_files):
                good_files.append(os.path.basename(shp))
            else:
                incomplete_files.append(os.path.basename(shp))
    else:
        threadPool = Pool(process_num)  # multiple processes
        parameters_list = [(idx, file_count, shp, good_files) for idx, shp in enumerate(vector_files)]
        results = threadPool.starmap(check_one_vector_file, parameters_list)  # need python3
        for shp, res in zip(vector_files, results):
            if res:
                good_files.append(os.path.basename(shp))
            else:
                incomplete_files.append(os.path.basename(shp))
        threadPool.close()

    io_function.save_list_to_txt(save_invalid_txt_path, incomplete_files)
    io_function.save_list_to_txt(save_good_txt_path, good_files)
Example #27
def plot_valid_entropy(in_folder, save_file_pre=None):

    if save_file_pre is None:
        save_file_pre = os.path.basename(in_folder)

    logfile = basic.logfile
    basic.setlogfile(save_file_pre + 'hist_info.txt')
    image_paths = io_function.get_file_list_by_ext('.tif',
                                                   in_folder,
                                                   bsub_folder=True)
    if len(image_paths) < 1:
        raise IOError('no tif files in %s' % in_folder)
    valid_per_list = []
    entropy_list = []
    img_count = len(image_paths)
    for idx, img_path in enumerate(image_paths):
        print('%d/%d' % (idx + 1, img_count))
        valid_per, entropy = raster_io.get_valid_percent_shannon_entropy(
            img_path, log_base=10)
        valid_per_list.append(valid_per)
        entropy_list.append(entropy)

    per_entropy_txt = save_file_pre + '_' + 'valid_per_entropy.txt'
    save_hist_path = save_file_pre + '_' + 'hist.jpg'
    with open(per_entropy_txt, 'w') as f_obj:
        for path, per, entropy in zip(image_paths, valid_per_list,
                                      entropy_list):
            f_obj.writelines(
                os.path.basename(path) + ' %.4f  %.6f \n' % (per, entropy))

    # plot the histogram
    fig = plt.figure(figsize=(6, 4))
    ax1 = fig.add_subplot(111)
    n, bins, patches = plt.hist(x=entropy_list,
                                bins=50,
                                color='b',
                                rwidth=0.85)
    plt.savefig(save_hist_path, dpi=200)
    histogram2logfile(entropy_list, bins, hist_tag=save_hist_path)

    basic.setlogfile(logfile)  # change log file name back
    return save_hist_path
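
raster_io.get_valid_percent_shannon_entropy is the repo's helper; a sketch of a compatible computation, assuming the valid percent is the share of non-nodata pixels and the entropy comes from a 256-bin histogram (assumptions, not confirmed by the source):

import numpy as np
import rasterio

def get_valid_percent_shannon_entropy(img_path, log_base=10):
    # hypothetical sketch
    with rasterio.open(img_path) as src:
        data = src.read(1)
        nodata = src.nodata
    valid = data if nodata is None else data[data != nodata]
    valid_per = 100.0 * valid.size / data.size
    hist, _ = np.histogram(valid, bins=256)
    p = hist[hist > 0] / valid.size
    entropy = -np.sum(p * np.log(p) / np.log(log_base))
    return valid_per, entropy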
def get_failed_grid_ids(task):
    if task == 'dem_diff':
        fail_log_dir = os.path.join(process_log_dir, 'get_dem_diff')
    elif task == 'dem_headwall_grid':
        fail_log_dir = os.path.join(process_log_dir,
                                    'extract_headwall_from_slope_grid')
    else:
        raise ValueError('Unknown task: %s' % str(task))

    fail_id_txt_list = io_function.get_file_list_by_ext('.txt',
                                                        fail_log_dir,
                                                        bsub_folder=False)
    old_id_txts = []

    # check they are old enough (24 hours)
    for txt in fail_id_txt_list:
        if check_file_or_dir_is_old(txt, 24, print_time=True):
            old_id_txts.append(txt)
    return old_id_txts
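
check_file_or_dir_is_old is defined elsewhere; a minimal sketch, assuming "old" means last modified more than the given number of hours ago:

import os
import time

def check_file_or_dir_is_old(path, hours, print_time=False):
    # hypothetical sketch
    age_hours = (time.time() - os.path.getmtime(path)) / 3600.0
    if print_time:
        print(path, 'modified %.1f hours ago' % age_hours)
    return age_hours > hours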
Example #29
def predict_remoteSensing_image(para_file, image_path, save_dir, model,
                                config_file, yolo_data, batch_size=1,
                                b_python_api=True):
    '''
    run prediction on a remote sensing image using YOLOv4
    :param image_path:
    :param model:
    :param config_file:
    :param yolo_data:
    :param batch_size:
    :param b_python_api: if True, use the python API of yolo
    :return:
    '''

    patch_w = parameters.get_digit_parameters(para_file, "inf_patch_width", 'int')
    patch_h = parameters.get_digit_parameters(para_file, "inf_patch_height", 'int')
    overlay_x = parameters.get_digit_parameters(para_file, "inf_pixel_overlay_x", 'int')
    overlay_y = parameters.get_digit_parameters(para_file, "inf_pixel_overlay_y", 'int')

    if b_python_api:
        # using the python API
        predict_rs_image_yolo_poythonAPI(image_path, save_dir, model, config_file, yolo_data,
                                         patch_w, patch_h, overlay_x, overlay_y, batch_size=batch_size)

        # each patch gets its own json file; many small json files hurt I/O,
        # so merge them into a single json file.
        res_json_files = io_function.get_file_list_by_ext('.json', save_dir, bsub_folder=False)
        merge_json_path = os.path.join(save_dir, 'all_patches.json')
        merge_patch_json_files_to_one(res_json_files, merge_json_path)
        for f_json in res_json_files:
            io_function.delete_file_or_dir(f_json)

    else:
        # divide the image into many patches, then run prediction
        patch_list_txt = split_an_image(para_file, image_path, save_dir, patch_w, patch_h, overlay_x, overlay_y)
        if patch_list_txt is None:
            return False
        result_json = save_dir + '_result.json'
        command_str = 'darknet detector test ' + yolo_data + ' ' + config_file + ' ' + model + ' -dont_show '
        command_str += ' -ext_output -out ' + result_json + ' < ' + patch_list_txt
        print(command_str)
        res = os.system(command_str)
        if res != 0:
            sys.exit(1)
Example #30
def read_down_load_geometry(folder):
    '''
    read geojson files in a folder; each geojson file stores the geometry
    of a scene, which is appended to a global variable
    :param folder: the save folder
    :return:
    '''
    global downloaded_scene_geometry
    json_list = io_function.get_file_list_by_ext('.geojson',
                                                 folder,
                                                 bsub_folder=False)
    for json_file in json_list:

        # ignore the scenes in the excluded list
        item_id = os.path.splitext(os.path.basename(json_file))[0]
        if item_id in manually_excluded_scenes:
            continue

        with open(json_file) as f_obj:
            data = json.load(f_obj)
            downloaded_scene_geometry.append(data)