Example #1
def draw_one_value_hist(shp_file, field_name, output, logfile, bin_min,
                        bin_max, bin_width, ylim):

    basic.setlogfile(logfile)
    values = read_attribute(shp_file, field_name)
    if field_name == 'INarea':  # m^2 to ha
        values = [item / 10000.0 for item in values]

    xlabelrotation = None
    if 'area' in field_name or 'INperimete' in field_name or 'circularit' in field_name or 'aspectLine' in field_name or \
        'slo' in field_name or 'dem' in field_name:
        xlabelrotation = 90

    bins = np.arange(bin_min, bin_max, bin_width)

    # plot histogram of slope values
    # value_list,output,bins=None,labels=None,color=None,hatch=""
    draw_one_list_histogram(values,
                            output,
                            bins=bins,
                            color=['grey'],
                            xlabelrotation=xlabelrotation,
                            ylim=ylim)  # ,hatch='-'
    # io_function.move_file_to_dst('processLog.txt', logfile, overwrite=True)
    # io_function.move_file_to_dst('processLog.txt', os.path.join(out_dir, logfile), overwrite=True)
    # io_function.move_file_to_dst(output, os.path.join(out_dir, output), overwrite=True)
    basic.setlogfile('processLog.txt')
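
Examples #1 and #4 redirect logging to a per-task file and then switch back to
'processLog.txt' by hand. A minimal sketch of wrapping that switch-and-restore
in a context manager, assuming basic.setlogfile() only swaps a module-level
path and basic.logfile exposes the current one (as Example #9 suggests):

from contextlib import contextmanager
import basic_src.basic as basic

@contextmanager
def temporary_logfile(path):
    previous = basic.logfile   # remember the current log file
    basic.setlogfile(path)     # redirect messages for this block
    try:
        yield
    finally:
        basic.setlogfile(previous)   # always switch back

# usage: with temporary_logfile(logfile): draw_one_list_histogram(...)
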
Example #2
def main():
    basic.setlogfile('log_to_relative_dem_8bit.txt')

    if os.path.isdir(relative_dem_dir) is False:
        io_function.mkdir(relative_dem_dir)

    # 500 pixels by 500 pixels, that is 1 km by 1 km
    patch_width = 500
    patch_height = 500
    process_num = 1

    failed_tifs = []

    dem_reg_list = io_function.get_file_list_by_pattern(
        arcticDEM_reg_tif_dir, '*dem_reg.tif')
    count = len(dem_reg_list)
    for idx, tif in enumerate(dem_reg_list):
        print('%d/%d convert %s to relative DEM (8bit)' %
              (idx + 1, count, tif))
        rel_dem_8bit = io_function.get_name_by_adding_tail(tif, 'relDEM8bit')
        rel_dem_8bit = os.path.join(relative_dem_dir,
                                    os.path.basename(rel_dem_8bit))
        try:
            dem_to_relative_dem(tif, rel_dem_8bit, patch_width, patch_height,
                                process_num)
        except Exception:
            failed_tifs.append(tif)

    with open('to_relative_dem_failed_cases.txt', 'w') as f_obj:
        for item in failed_tifs:
            f_obj.writelines(item + '\n')
    pass
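
Example #7 below does the same failure bookkeeping in a single call,
io_function.save_list_to_txt('failed_dem_diff_to8bit.txt', failed_tifs),
instead of writing the list out line by line.
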
Example #3
def segment_a_dem_diff(dem_diff_path,
                       process_num,
                       ele_diff_thr,
                       min_area,
                       max_area,
                       job_id=0):

    basic.setlogfile('log_segment_dem_diff_job_%d.txt' % job_id)

    # find 8bit one
    tif_8bit = io_function.get_name_by_adding_tail(dem_diff_path, '8bit')
    demD_8bit = os.path.join(dem_common.grid_dem_diffs_8bit_dir,
                             os.path.basename(tif_8bit))
    if os.path.isfile(demD_8bit) is False:
        print('error, 8bit DEM diff does not exist: %s ' % demD_8bit)
        return False

    grid_id = int(
        re.findall(r'grid\d+', os.path.basename(dem_diff_path))[0][4:])

    save_dir = os.path.join(dem_common.grid_dem_diffs_segment_dir,
                            'segment_result_grid%d' % grid_id)
    return segment_subsidence_grey_image(demD_8bit,
                                         dem_diff_path,
                                         save_dir,
                                         process_num,
                                         subsidence_thr_m=ele_diff_thr,
                                         min_area=min_area,
                                         max_area=max_area)
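
The grid id parsing in this example can be checked in isolation; the filename
layout ('...grid<digits>...') is an assumption read off this snippet:

import os, re
dem_diff_path = '/tmp/DEM_diff_grid12345.tif'   # hypothetical example path
grid_id = int(re.findall(r'grid\d+', os.path.basename(dem_diff_path))[0][4:])
assert grid_id == 12345   # 'grid12345' with the 'grid' prefix stripped
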
Example #4
def draw_two_values_hist(shp_file, field_name, raster_file, output, logfile,
                         bin_min, bin_max, bin_width, labels, ylim):

    basic.setlogfile(logfile)
    raster_values = read_oneband_image_to_1dArray(raster_file,
                                                  nodata=0,
                                                  ignore_small=bin_min)
    bins = np.arange(bin_min, bin_max, bin_width)

    # update
    global global_bin_size
    global_bin_size = bin_width
    ylim = [item / (100.0 * bin_width) for item in ylim]

    draw_two_list_histogram(shp_file,
                            field_name,
                            raster_values,
                            output,
                            bins=bins,
                            labels=labels,
                            color=['black', 'silver'],
                            ylim=ylim)
    # io_function.move_file_to_dst('processLog.txt', logfile, overwrite=True)
    # io_function.move_file_to_dst('processLog.txt', os.path.join(out_dir,logfile), overwrite=True)
    # io_function.move_file_to_dst(output, os.path.join(out_dir,output), overwrite=True)
    basic.setlogfile('processLog.txt')
Example #5
def main(options, args):
    extent_shp_or_ids_txt = args[0]
    process_num = options.process_num
    o_res = options.out_res

    if os.path.isdir(grid_matchtag_sum_dir) is False:
        io_function.mkdir(grid_matchtag_sum_dir)

    basic.setlogfile('produce_matchtag_sum_ArcticDEM_log_%s.txt' %
                     timeTools.get_now_time_str())

    # read grids and ids
    time0 = time.time()
    all_grid_polys, all_ids = vector_gpd.read_polygons_attributes_list(
        grid_20_shp, 'id')
    print('time cost of reading polygons and attributes', time.time() - time0)

    # get grid ids based on input extent
    grid_base_name = os.path.splitext(
        os.path.basename(extent_shp_or_ids_txt))[0]
    grid_polys, grid_ids = get_grid_20(extent_shp_or_ids_txt, all_grid_polys,
                                       all_ids)

    # check dem difference existence
    grid_dem_tifs, grid_ids_no_sum = get_existing_matchtag_sum(
        grid_matchtag_sum_dir, grid_base_name, grid_ids)
    if len(grid_ids_no_sum) > 0:
        # refine grid_polys
        if len(grid_ids) > len(grid_ids_no_sum):
            id_index = [grid_ids.index(id) for id in grid_ids_no_sum]
            grid_polys = [grid_polys[idx] for idx in id_index]

        # # download ArcticDEM and applying registration
        # tarballs, reg_tifs = download_dem_tarball(dem_strip_shp, grid_polys, arcticDEM_tarball_dir, grid_base_name,
        #                                         reg_tif_dir=arcticDEM_reg_tif_dir, poly_ids=grid_ids_no_demDiff)
        #
        # # unpack and applying registration
        # if len(tarballs) > 0:
        #     basic.outputlogMessage('Processs %d dem tarballs'%len(tarballs))
        #     out_reg_tifs = process_dem_tarball(tarballs,'./',arcticDEM_reg_tif_dir,remove_inter_data=True, apply_registration=True)
        #     basic.outputlogMessage('Get %d new registration dem tifs' % len(out_reg_tifs))
        #     reg_tifs.extend(out_reg_tifs)

        reg_tifs = io_function.get_file_list_by_ext('.tif',
                                                    arcticDEM_reg_tif_dir,
                                                    bsub_folder=False)
        matchtag_tifs = [tif for tif in reg_tifs
                         if 'matchtag' in tif]  # only keep matchtag
        # crop, sum
        out_dem_diffs = produce_matchtag_sum_grids(grid_polys,
                                                   grid_ids_no_sum,
                                                   grid_base_name,
                                                   matchtag_tifs,
                                                   o_res,
                                                   process_num=process_num)
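
The "refine grid_polys" step above (repeated in Examples #6 and #19) calls
grid_ids.index(id) once per remaining id, which is quadratic for large grids.
A sketch of an order-preserving linear variant (same inputs, same result):

index_of = {gid: i for i, gid in enumerate(grid_ids)}
grid_polys = [grid_polys[index_of[gid]] for gid in grid_ids_no_sum]
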
Example #6
def main(options, args):
    extent_shp_or_ids_txt = args[0]
    process_num = options.process_num
    keep_dem_percent = options.keep_dem_percent
    o_res = options.out_res

    basic.setlogfile('produce_headwall_shp_ArcticDEM_log_%s.txt' %
                     timeTools.get_now_time_str())

    if os.path.isdir(grid_dem_headwall_shp_dir) is False:
        io_function.mkdir(grid_dem_headwall_shp_dir)

    # read grids and ids
    time0 = time.time()
    all_grid_polys, all_ids = vector_gpd.read_polygons_attributes_list(
        grid_20_shp, 'id')
    print('time cost of reading polygons and attributes', time.time() - time0)

    # get grid ids based on input extent
    grid_base_name = os.path.splitext(
        os.path.basename(extent_shp_or_ids_txt))[0]
    grid_polys, grid_ids = get_grid_20(extent_shp_or_ids_txt, all_grid_polys,
                                       all_ids)

    # check dem difference existence
    grid_headwall_shps, grid_id_no_headwall_shp = get_existing_grid_headwall_shp(
        grid_dem_headwall_shp_dir, grid_base_name, grid_ids)
    if len(grid_id_no_headwall_shp) > 0:
        # refine grid_polys
        if len(grid_ids) > len(grid_id_no_headwall_shp):
            id_index = [grid_ids.index(id) for id in grid_id_no_headwall_shp]
            grid_polys = [grid_polys[idx] for idx in id_index]

        reg_tifs = io_function.get_file_list_by_ext('.tif',
                                                    arcticDEM_reg_tif_dir,
                                                    bsub_folder=False)
        reg_tifs = [tif for tif in reg_tifs
                    if 'matchtag' not in tif]  # remove matchtag
        #
        headwall_shp_folders = extract_headwall_grids(grid_polys,
                                                      grid_id_no_headwall_shp,
                                                      grid_base_name,
                                                      reg_tifs,
                                                      b_mosaic_id,
                                                      b_mosaic_date,
                                                      keep_dem_percent,
                                                      o_res,
                                                      process_num=process_num)
Example #7
def main():
    basic.setlogfile('log_convert_dem_diff_to8bit.txt')
    if os.path.isdir(grid_dem_diffs_8bit_dir) is False:
        io_function.mkdir(grid_dem_diffs_8bit_dir)

    dem_diff_list = io_function.get_file_list_by_pattern(grid_dem_diffs_dir, '*DEM_diff_grid*.tif')
    count = len(dem_diff_list)
    failed_tifs = []
    for idx, tif in enumerate(dem_diff_list):
        print('%d/%d convert %s to 8 bit' % (idx + 1, count, tif))
        tif_8bit = io_function.get_name_by_adding_tail(tif, '8bit')
        output = os.path.join(grid_dem_diffs_8bit_dir, os.path.basename(tif_8bit))
        if dem_tif_to_8bit(tif, output) is False:
            failed_tifs.append(tif)

    if len(failed_tifs) > 0:
        io_function.save_list_to_txt('failed_dem_diff_to8bit.txt', failed_tifs)
Example #8
def main():

    basic.setlogfile('scp_log.txt')

    while True:
        # get remote dir
        basic.outputlogMessage('get remote folders')
        remote_folders = get_remote_folder(remote_dir, folder_pattern)
        basic.outputlogMessage("%d remote folders" % len(remote_folders))

        folder_list = get_local_folder(local_dir, folder_pattern)
        basic.outputlogMessage("%d local folders" % len(folder_list))

        folder_name_list = [os.path.basename(item) for item in folder_list]

        for idx, r_folders in enumerate(remote_folders):
            folder_name = os.path.basename(r_folders)
            if folder_name in folder_name_list:
                continue

            basic.outputlogMessage('copy trained folder in %s' % folder_name)
            command_str = 'scp -r ${tesia_host}:%s %s/%s' % (
                r_folders, local_dir, folder_name)
            print(command_str)
            status, result = basic.getstatusoutput(command_str)

            if status != 0:
                sys.exit(1)

        folder_list = get_local_folder(local_dir,
                                       folder_pattern)  # update local folder
        # remove incomplete folders
        for folder in folder_list:
            res_json = os.path.join(folder, 'result.json')
            if os.path.isfile(res_json) and os.path.getsize(res_json) > 0:
                continue
            else:
                basic.outputlogMessage('remove incomplete folder: %s' %
                                       os.path.basename(folder))
                io_function.delete_file_or_dir(folder)

        basic.outputlogMessage('wait five hours')
        time.sleep(3600 * 5)  # wait five hours

    pass
Example #9
def plot_valid_entropy(in_folder, save_file_pre=None):

    if save_file_pre is None:
        save_file_pre = os.path.basename(in_folder)

    logfile = basic.logfile
    basic.setlogfile(save_file_pre + 'hist_info.txt')
    image_paths = io_function.get_file_list_by_ext('.tif',
                                                   in_folder,
                                                   bsub_folder=True)
    if len(image_paths) < 1:
        raise IOError('no tif files in %s' % in_folder)
    valid_per_list = []
    entropy_list = []
    img_count = len(image_paths)
    for idx, img_path in enumerate(image_paths):
        print('%d/%d' % (idx + 1, img_count))
        valid_per, entropy = raster_io.get_valid_percent_shannon_entropy(
            img_path, log_base=10)
        valid_per_list.append(valid_per)
        entropy_list.append(entropy)

    per_entropy_txt = save_file_pre + '_' + 'valid_per_entropy.txt'
    save_hist_path = save_file_pre + '_' + 'hist.jpg'
    with open(per_entropy_txt, 'w') as f_obj:
        for path, per, entropy in zip(image_paths, valid_per_list,
                                      entropy_list):
            f_obj.writelines(
                os.path.basename(path) + ' %.4f  %.6f \n' % (per, entropy))

    # plot the histogram
    fig = plt.figure(figsize=(6, 4))  #
    ax1 = fig.add_subplot(111)
    n, bins, patches = plt.hist(x=entropy_list,
                                bins=50,
                                color='b',
                                rwidth=0.85)
    # print(n, bins, patches)
    plt.savefig(save_hist_path, dpi=200)  # 300
    histogram2logfile(entropy_list, bins, hist_tag=save_hist_path)

    basic.setlogfile(logfile)  # change log file name back
    return save_hist_path
Example #10
import os, sys

import numpy as np


from distutils.version import LooseVersion
import tqdm
if LooseVersion(tqdm.__version__) < LooseVersion("4.34"):
    raise EnvironmentError('tqdm version should be newer (>= 4.34) for url download, run pip install tqdm -U')
from tqdm import tqdm

HOME = os.path.expanduser('~')
# path of DeeplabforRS
codes_dir2 = HOME +'/codes/PycharmProjects/DeeplabforRS'
sys.path.insert(0, codes_dir2)

import basic_src.basic as basic
basic.setlogfile('pytorch_deeplab_train.log')

from mypath import Path
from dataloaders import make_data_loader
from modeling.sync_batchnorm.replicate import patch_replication_callback
from modeling.deeplab import *
from utils.loss import SegmentationLosses
from utils.calculate_weights import calculate_weigths_labels
from utils.lr_scheduler import LR_Scheduler
from utils.saver import Saver
from utils.summaries import TensorboardSummary
from utils.metrics import Evaluator

class Trainer(object):
    def __init__(self, args):
        self.args = args
Example #11
import sys, os
from optparse import OptionParser

import cv2
import numpy as np

HOME = os.path.expanduser('~')

# path of DeeplabforRS
codes_dir2 = HOME + '/codes/PycharmProjects/DeeplabforRS'
sys.path.insert(0, codes_dir2)
import parameters
import basic_src.basic as basic

basic.setlogfile('check_label_patches.txt')


def main(options, args):

    input_file = args[0]
    img_patch_dir = options.image_dir
    label_patch_dir = options.label_dir

    basic.outputlogMessage(
        'check split image patches and label patches in %s, especially after data augmentation'
        % input_file)
    b_diff = False

    num_classes_noBG = parameters.get_digit_parameters(options.para_file,
                                                       'NUM_CLASSES_noBG',
Example #12
                      "--to_rgb",
                      action="store_true",
                      dest="to_rgb",
                      default=False,
                      help="true to convert all images to 8 bit rgb")

    parser.add_option(
        "-g",
        "--group_date",
        action="store_true",
        dest="group_date",
        default=False,
        help="true to group image if their acquisition date is the same")

    # parser.add_option("-i", "--item_types",
    #                   action="store", dest="item_types",default='PSScene4Band',
    #                   help="the item types, e.g., PSScene4Band,PSOrthoTile")
    # parser.add_option("-a", "--planet_account",
    #                   action="store", dest="planet_account",default='*****@*****.**',
    #                   help="planet email account, e.g., [email protected]")

    (options, args) = parser.parse_args()
    # print(options.to_rgb)
    if len(sys.argv) < 2 or len(args) < 1:
        parser.print_help()
        sys.exit(2)

    basic.setlogfile('mosaic_images_crop_grid_%s.log' %
                     str(datetime.date(datetime.now())))

    main(options, args)
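
The log name here embeds the run date: with 'from datetime import datetime',
str(datetime.date(datetime.now())) renders as e.g. '2019-09-24', giving
'mosaic_images_crop_grid_2019-09-24.log'. An equivalent, more common spelling:

from datetime import datetime
log_name = 'mosaic_images_crop_grid_%s.log' % datetime.now().strftime('%Y-%m-%d')
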
Example #13
add time: 24 September, 2019
"""

# note: it seems the codes cannot run on multi-nodes on ITSC services, so I have to submit jobs separately

import os, sys
import time

HOME = os.path.expanduser('~')
codes_dir2 = HOME +'/codes/PycharmProjects/DeeplabforRS'
sys.path.insert(0, codes_dir2)

import basic_src.basic as basic
import basic_src.io_function as io_function

basic.setlogfile('parallel_predict_rtsLog.txt')

predict_script = HOME + '/codes/PycharmProjects/Landuse_DL/sentinelScripts/predict_rts_oneImg.sh'

import GPUtil
import datetime
from multiprocessing import Process

machine_name = os.uname()[1]

start_time = datetime.datetime.now()

# remove previous results
outdir = 'multi_inf_results'
if os.path.isdir(outdir) and 'chpc' not in machine_name:  # on ITSC service, need to manually delete previous results
    io_function.delete_file_or_dir(outdir)
Example #14
    parser.add_option("",
                      "--extent_shp",
                      action="store",
                      dest="extent_shp",
                      help="the area extent")

    parser.add_option(
        "-n",
        "--region_name",
        action="store",
        dest="region_name",
        help="the name of the area, which we download snow cover for")

    # parser.add_option("", "--extent_xy",
    #                   action="store", dest="extent_xy",
    #                   help="a list of xy points")

    # parser.add_option("-b", "--buffer_size",
    #                   action="store", dest="buffer_size", type=int, default = 3000,
    #                   help="the buffer size to crop image in meters")

    (options, args) = parser.parse_args()
    if len(sys.argv) < 2 or len(args) < 1:
        parser.print_help()
        sys.exit(2)

    basic.setlogfile('get_timelapse_img_gee_%s.log' %
                     str(datetime.date(datetime.now())))

    main(options, args)
Example #15
    # dem_path = os.path.join(dem_file_dir,'WR_extent_2m_v3.0_ArcticTileDEM_sub_1_prj.tif')

    # dem patches
    dem_file_dir = os.path.expanduser(
        '~/Data/Arctic/canada_arctic/DEM/WR_dem_ArcticDEM_mosaic/dem_patches')
    dem_list = io_function.get_file_list_by_ext('.tif',
                                                dem_file_dir,
                                                bsub_folder=False)
    save_shp = os.path.basename(
        io_function.get_name_by_adding_tail(shp, 'multi_raster_stats'))

    io_function.copy_shape_file(shp, save_shp)
    zonal_stats_multiRasters(save_shp,
                             dem_list,
                             nodata=None,
                             band=1,
                             stats=None,
                             prefix='dem',
                             range=None,
                             all_touched=True,
                             process_num=4)


def main():
    test_zonal_stats_multiRasters()
    pass


if __name__ == '__main__':
    basic.setlogfile('raster_statistic.log')
    main()
Example #16
def image_augment_main(para_file, img_list_txt, save_list, img_dir, out_dir,
                       extension, is_ground_truth, proc_num):

    basic.setlogfile('log_data_augmentation.txt')

    if os.path.isfile(img_list_txt) is False:
        raise IOError("File %s not exist" % img_list_txt)

    if os.path.isdir(out_dir) is False:
        os.makedirs(out_dir)

    if img_dir != out_dir:
        raise ValueError(
            'image dir and output dir should be the same, making it easy to update the image list'
        )

    # print(options.para_file)
    augmentation = parameters.get_string_list_parameters_None_if_absence(
        para_file, 'data_augmentation')
    if augmentation is None or len(augmentation) < 1:
        basic.outputlogMessage(
            'No augmentation option (e.g. flip) is given; skip data augmentation'
        )
        return True

    # number of classes
    num_classes_noBG = parameters.get_digit_parameters_None_if_absence(
        para_file, 'NUM_CLASSES_noBG', 'int')
    global num_classes
    num_classes = num_classes_noBG + 1

    # ignored classes
    ignore_classes = parameters.get_string_list_parameters_None_if_absence(
        para_file, 'data_aug_ignore_classes')

    with open(img_list_txt, 'r') as f_obj:
        files_list = f_obj.readlines()
    file_count = len(files_list)
    index = 1
    # for line in files_list:
    #
    #     # ignore_classes
    #     if ignore_classes is not None and len(ignore_classes)>0:
    #         found_class = [ line.find(ignore_class) >= 0 for ignore_class in ignore_classes ]
    #         if True in found_class:
    #             continue
    #
    #     file_path  = line.strip()
    #     file_path = os.path.join(img_dir,file_path+extension)
    #     print ("Augmentation of image (%d / %d)"%(index,file_count))
    #     if image_augment(file_path,out_dir,is_ground_truth,augment=augmentation) is False:
    #         print ('Error, Failed in image augmentation')
    #         return False
    #     index += 1

    parameters_list = [(index + 1, line, ignore_classes, file_count, img_dir,
                        extension, out_dir, is_ground_truth, augmentation)
                       for index, line in enumerate(files_list)]
    process_pool = Pool(proc_num)  # multiple processes
    results = process_pool.starmap(augment_one_line,
                                   parameters_list)  # needs python3
    process_pool.close()
    augmented = [1 for item in results if item is True]
    # print(sum(augmented))

    if sum(augmented) < file_count:
        basic.outputlogMessage('Some of the images belonging to %s are ignored' %
                               ','.join(ignore_classes))

    # update img_list_txt (img_dir is the same as out_dir)
    new_files = io_function.get_file_list_by_ext(extension,
                                                 out_dir,
                                                 bsub_folder=False)
    new_files_noext = [
        os.path.splitext(os.path.basename(item))[0] + '\n'
        for item in new_files
    ]
    basic.outputlogMessage('save new file list to %s' % save_list)
    with open(save_list, 'w') as f_obj:
        f_obj.writelines(new_files_noext)
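
The Pool.starmap call above fans one argument tuple per image out to
augment_one_line across proc_num processes. A self-contained sketch of the
same pattern (the worker and its arguments here are stand-ins):

from multiprocessing import Pool

def work(idx, name):
    # pretend: returns True when the item was augmented
    return idx % 2 == 0

if __name__ == '__main__':
    params = [(i, 'file_%d' % i) for i in range(8)]
    with Pool(4) as pool:                     # 4 worker processes
        results = pool.starmap(work, params)  # one call per tuple
    print(sum(1 for r in results if r is True))
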
Example #17
    usage = "usage: %prog [options] shapefile or shapefiles"
    parser = OptionParser(usage=usage, version="1.0 2017-10-28")
    parser.description = 'Introduction: plot accuracies of the results  '

    parser.add_option("-p", "--para",
                      action="store", dest="para_file",default='para.ini',
                      help="the parameters file")
    parser.add_option("-o", "--output",
                      action="store", dest="output",default='P_R.jpg',
                      help="the parameters file")

    (options, args) = parser.parse_args()
    if len(sys.argv) < 2 or len(args) < 1:
        parser.print_help()
        sys.exit(2)
    # set parameters files, mandatory for the path of ground truth polygons
    if options.para_file is None:
        print('error, no parameters file')
        parser.print_help()
        sys.exit(2)
    else:
        parameters.set_saved_parafile_path(options.para_file)

    basic.setlogfile('accuracies_log.txt')

    main(options, args)


    pass

Example #18
                      "--para",
                      action="store",
                      dest="para_file",
                      default='para.ini',
                      help="the parameters file")

    parser.add_option("-i",
                      "--iou_threshold",
                      action="store",
                      dest="iou_threshold",
                      type=float,
                      default=0.01,
                      help="the iou threshold")

    (options, args) = parser.parse_args()
    if len(sys.argv) < 2 or len(args) < 1:
        parser.print_help()
        sys.exit(2)
    # set parameters files, mandatory for the path of ground truth polygons
    if options.para_file is None:
        print('error, no parameters file')
        parser.print_help()
        sys.exit(2)
    else:
        parameters.set_saved_parafile_path(options.para_file)

    basic.setlogfile('get_trueFN_log.txt')
    main(options, args)

    pass
Example #19
def main(options, args):

    process_num = options.process_num
    buffer_size = options.buffer_size
    # perform the selection grid by grid
    basic.setlogfile('select_RTS_YOLO_demDiff_headwall_%s.txt' %
                     timeTools.get_now_time_str())

    b_grid = options.b_grid
    if b_grid:
        # process the selection grid by grid
        extent_shp_or_ids_txt = args[0]
        yolo_result_dir = os.path.expanduser(
            '~/Data/Arctic/alaska/autoMapping/alaskaNS_yolov4_1')
        dem_subsidence_dir = grid_dem_diffs_segment_dir
        grid_headwall_dir = grid_dem_headwall_shp_dir

        # read grids and ids
        time0 = time.time()
        all_grid_polys, all_ids = vector_gpd.read_polygons_attributes_list(
            grid_20_shp, 'id')
        print('time cost of reading polygons and attributes', time.time() - time0)

        # get grid ids based on input extent
        grid_base_name = os.path.splitext(
            os.path.basename(extent_shp_or_ids_txt))[0]
        grid_polys, grid_ids = get_grid_20(extent_shp_or_ids_txt,
                                           all_grid_polys, all_ids)

        # check dem difference existence
        grid_rts_shps, grid_id_no_rts_shp = get_existing_select_grid_rts(
            grid_rts_shp_dir, grid_base_name, grid_ids)

        if len(grid_id_no_rts_shp) > 0:
            # refine grid_polys
            if len(grid_ids) > len(grid_id_no_rts_shp):
                id_index = [grid_ids.index(id) for id in grid_id_no_rts_shp]
                grid_polys = [grid_polys[idx] for idx in id_index]
            #
            rts_shp_folders = select_rts_map_demDiff_headwall_grids(
                yolo_result_dir,
                dem_subsidence_dir,
                grid_headwall_dir,
                grid_polys,
                grid_id_no_rts_shp,
                grid_base_name,
                process_num=process_num)
    else:
        # processing the selection for two input shapefile
        yolo_box_shp = args[0]
        dem_subsidence_shp = args[1]
        print('polygon group 1:', yolo_box_shp)
        print('polygon group 2:', dem_subsidence_shp)

        if options.save_path is not None:
            save_path = options.save_path
        else:
            save_path = io_function.get_name_by_adding_tail(
                yolo_box_shp, 'select')

        select_polygons_overlap_others_in_group2(yolo_box_shp,
                                                 dem_subsidence_shp,
                                                 save_path,
                                                 buffer_size=buffer_size,
                                                 process_num=process_num)

    pass
Example #20
def main(options, args):
    b_mosaic_ArcticDEM = options.b_mosaic_ArcticDEM
    process_num = options.process_num
    product_list = args  # subset of [slope, slope_8bit, hillshade, tpi]
    print('Will produce the following products: %s' % (product_list))

    if b_mosaic_ArcticDEM:
        print('Input is the mosaic version of ArcticDEM')
        from dem_common import arcticDEM_tile_reg_tif_dir,arcticDEM_tile_hillshade_dir,arcticDEM_tile_slope_8bit_dir,\
            arcticDEM_tile_slope_dir,arcticDEM_tile_tpi_8bit_dir
        arcticDEM_reg_tif_dir = arcticDEM_tile_reg_tif_dir
        arcticDEM_hillshade_dir = arcticDEM_tile_hillshade_dir
        arcticDEM_slope_8bit_dir = arcticDEM_tile_slope_8bit_dir
        arcticDEM_slope_dir = arcticDEM_tile_slope_dir
        arcticDEM_tpi_8bit_dir = arcticDEM_tile_tpi_8bit_dir
        dem_pattern = '*reg_dem.tif'
    else:
        from dem_common import arcticDEM_reg_tif_dir, dem_hillshade_dir, dem_slope_dir, dem_slope_8bit_dir, dem_tpi_8bit_dir
        # arcticDEM_reg_tif_dir is already imported under the needed name
        arcticDEM_hillshade_dir = dem_hillshade_dir
        arcticDEM_slope_8bit_dir = dem_slope_8bit_dir
        arcticDEM_slope_dir = dem_slope_dir
        arcticDEM_tpi_8bit_dir = dem_tpi_8bit_dir
        dem_pattern = '*dem_reg.tif'

    basic.setlogfile('log_dem_to_slope8bit_hillshade.txt')

    if os.path.isdir(arcticDEM_slope_8bit_dir) is False:
        io_function.mkdir(arcticDEM_slope_8bit_dir)
    if os.path.isdir(arcticDEM_hillshade_dir) is False:
        io_function.mkdir(arcticDEM_hillshade_dir)
    if os.path.isdir(arcticDEM_tpi_8bit_dir) is False:
        io_function.mkdir(arcticDEM_tpi_8bit_dir)
    if os.path.isdir(arcticDEM_slope_dir) is False:
        io_function.mkdir(arcticDEM_slope_dir)

    failed_tifs = []

    dem_reg_list = io_function.get_file_list_by_pattern(
        arcticDEM_reg_tif_dir, dem_pattern)
    count = len(dem_reg_list)
    print('Find %d DEM in %s, with pattern: %s' %
          (count, arcticDEM_reg_tif_dir, dem_pattern))
    if process_num == 1:
        for idx, tif in enumerate(dem_reg_list):
            res = process_one_dem(idx, count, tif, product_list,
                                  arcticDEM_slope_dir,
                                  arcticDEM_slope_8bit_dir,
                                  arcticDEM_hillshade_dir,
                                  arcticDEM_tpi_8bit_dir)
            if res is not True:
                failed_tifs.append(res)
    else:
        parameters_list = [(idx, count, tif, product_list, arcticDEM_slope_dir,
                            arcticDEM_slope_8bit_dir, arcticDEM_hillshade_dir,
                            arcticDEM_tpi_8bit_dir)
                           for idx, tif in enumerate(dem_reg_list)]
        process_pool = Pool(process_num)
        results = process_pool.starmap(process_one_dem, parameters_list)
        for res in results:
            if res is not True:
                failed_tifs.append(res)
        process_pool.close()

    with open('to_hillshade_slope8bit_failed_cases.txt', 'w') as f_obj:
        for item in failed_tifs:
            f_obj.writelines(item + '\n')
Example #21
def main(options, args):
    b_mosaic_ArcticDEM = options.b_mosaic_ArcticDEM
    process_num = options.process_num

    if b_mosaic_ArcticDEM:
        print('Input is the mosaic version of ArcticDEM')
        arcticDEM_hillshade_dir = os.path.join(ArcticDEM_tmp_dir,
                                               'arcticdem_mosaic_hillshade')
        arcticDEM_slope_8bit_dir = os.path.join(ArcticDEM_tmp_dir,
                                                'arcticdem_mosaic_slope_8bit')
        arcticDEM_tpi_8bit_dir = os.path.join(ArcticDEM_tmp_dir,
                                              'arcticdem_mosaic_tpi_8bit')
        file_pattern = '*.tif'
        arcticDEM_topo_rgb_dir = os.path.join(ArcticDEM_tmp_dir,
                                              'arcticdem_mosaic_topoRGB_8bit')
    else:
        arcticDEM_hillshade_dir = os.path.join(ArcticDEM_tmp_dir,
                                               'dem_hillshade')
        arcticDEM_slope_8bit_dir = os.path.join(ArcticDEM_tmp_dir,
                                                'dem_slope_8bit')
        arcticDEM_tpi_8bit_dir = os.path.join(ArcticDEM_tmp_dir,
                                              'dem_tpi_8bit')
        file_pattern = '*.tif'
        arcticDEM_topo_rgb_dir = os.path.join(ArcticDEM_tmp_dir,
                                              'dem_topoRGB_8bit')

    basic.setlogfile('log_compose_dem_products.txt')

    if os.path.isdir(arcticDEM_topo_rgb_dir) is False:
        io_function.mkdir(arcticDEM_topo_rgb_dir)

    failed_tifs = []

    hillshade_list = io_function.get_file_list_by_pattern(
        arcticDEM_hillshade_dir, file_pattern)
    count = len(hillshade_list)
    print('Find %d Hillshade in %s, with pattern: %s' %
          (count, arcticDEM_hillshade_dir, file_pattern))
    if process_num == 1:
        for idx, tif in enumerate(hillshade_list):
            res = compose_rgb_from_hillshade_slope_tpi(
                idx, count, tif, arcticDEM_slope_8bit_dir,
                arcticDEM_tpi_8bit_dir, arcticDEM_topo_rgb_dir)
            if res is not True:
                failed_tifs.append(res)
    else:
        parameters_list = [(idx, count, tif, arcticDEM_slope_8bit_dir,
                            arcticDEM_tpi_8bit_dir, arcticDEM_topo_rgb_dir)
                           for idx, tif in enumerate(hillshade_list)]
        process_pool = Pool(process_num)
        results = process_pool.starmap(compose_rgb_from_hillshade_slope_tpi,
                                       parameters_list)
        for res in results:
            if res is not True:
                failed_tifs.append(res)
        process_pool.close()

    with open('to_rgb_from_hillshade_slope8bit_failed_cases.txt',
              'w') as f_obj:
        for item in failed_tifs:
            f_obj.writelines(item + '\n')
Example #22
        "--cloud_cover",
        action="store",
        dest="cloud_cover",
        type=float,
        help="the cloud cover threshold, only accept images with cloud cover less than the threshold"
    )
    parser.add_option("-i",
                      "--item_types",
                      action="store",
                      dest="item_types",
                      default='PSScene4Band',
                      help="the item types, e.g., PSScene4Band,PSOrthoTile")
    parser.add_option(
        "-a",
        "--planet_account",
        action="store",
        dest="planet_account",
        default='*****@*****.**',
        help="planet email account, e.g., [email protected]")

    (options, args) = parser.parse_args()
    if len(sys.argv) < 2 or len(args) < 1:
        parser.print_help()
        sys.exit(2)

    basic.setlogfile('download_planet_images_%s.log' %
                     str(datetime.date(datetime.now())))

    main(options, args)
Example #23
    )

    parser.add_option(
        "-t",
        "--istraining",
        action="store_true",
        dest="istraining",
        default=False,
        help="to indicate the script will perform training process")

    parser.add_option("-s",
                      "--shape_train",
                      action="store",
                      dest="polygon_train",
                      help="the shape file containing polygons for training")

    parser.add_option("-o",
                      "--output",
                      action="store",
                      dest="output",
                      help="the output file path")

    (options, args) = parser.parse_args()
    if len(sys.argv) < 2:
        parser.print_help()
        sys.exit(2)

    basic.setlogfile('planet_svm_log.txt')

    main(options, args)
    usage = "usage: %prog [options]  input_image "
    parser = OptionParser(usage=usage, version="1.0 2019-1-4")
    parser.description = 'Introduction: pixel-based image classification based on random forest classifier'

    parser.add_option("-p", "--ispreprocess",
                      action="store_true", dest="ispreprocess", default=False,
                      help="to indicate the script will perform pre-processing, if this set, istraining will be ignored")

    parser.add_option("-t", "--istraining",
                      action="store_true", dest="istraining", default=False,
                      help="to indicate the script will perform training process")

    parser.add_option("-s", "--shape_train",
                      action="store", dest="polygon_train",
                      help="the shape file containing polygons for training")


    parser.add_option("-o", "--output",
                      action="store", dest="output",
                      help="the output file path")

    (options, args) = parser.parse_args()
    if len(sys.argv) < 2:
        parser.print_help()
        sys.exit(2)

    basic.setlogfile('RandomForest_log.txt')

    main(options, args)

Example #25
def main(options, args):

    print(
        "%s : prediction using the trained model (runs in parallel when multiple GPUs are used) "
        % os.path.basename(sys.argv[0]))
    machine_name = os.uname()[1]
    start_time = datetime.datetime.now()

    para_file = args[0]
    if os.path.isfile(para_file) is False:
        raise IOError('File %s does not exist in the current folder: %s' %
                      (para_file, os.getcwd()))

    basic.setlogfile('parallel_predict_Log.txt')

    deeplab_inf_script = os.path.join(code_dir, 'deeplabBased',
                                      'deeplab_inference.py')
    network_setting_ini = parameters.get_string_parameters(
        para_file, 'network_setting_ini')

    global tf1x_python
    tf1x_python = parameters.get_file_path_parameters(network_setting_ini,
                                                      'tf1x_python')

    trained_model = options.trained_model

    outdir = parameters.get_directory(para_file, 'inf_output_dir')

    # do not remove previous results here (let the user remove this folder manually or in exe.sh)
    io_function.mkdir(outdir)

    # get name of inference areas
    multi_inf_regions = parameters.get_string_list_parameters(
        para_file, 'inference_regions')

    # max_parallel_inf_task = parameters.get_digit_parameters(para_file,'max_parallel_inf_task','int')

    b_use_multiGPUs = parameters.get_bool_parameters(para_file,
                                                     'b_use_multiGPUs')

    # loop each inference regions
    sub_tasks = []
    for area_idx, area_ini in enumerate(multi_inf_regions):

        area_name = parameters.get_string_parameters(area_ini, 'area_name')
        area_remark = parameters.get_string_parameters(area_ini, 'area_remark')
        area_time = parameters.get_string_parameters(area_ini, 'area_time')

        inf_image_dir = parameters.get_directory(area_ini, 'inf_image_dir')

        # it is OK to pass a file name as the pattern to the following functions to get the file list
        inf_image_or_pattern = parameters.get_string_parameters(
            area_ini, 'inf_image_or_pattern')

        inf_img_list = io_function.get_file_list_by_pattern(
            inf_image_dir, inf_image_or_pattern)
        img_count = len(inf_img_list)
        if img_count < 1:
            raise ValueError(
                'No image for inference, please check inf_image_dir and inf_image_or_pattern in %s'
                % area_ini)

        area_save_dir = os.path.join(
            outdir, area_name + '_' + area_remark + '_' + area_time)
        io_function.mkdir(area_save_dir)

        # parallel inference images for this area
        CUDA_VISIBLE_DEVICES = []
        if 'CUDA_VISIBLE_DEVICES' in os.environ.keys():
            CUDA_VISIBLE_DEVICES = [
                int(item.strip())
                for item in os.environ['CUDA_VISIBLE_DEVICES'].split(',')
            ]
        idx = 0
        while idx < img_count:

            if b_use_multiGPUs:
                # get available GPUs  # https://github.com/anderskm/gputil
                deviceIDs = GPUtil.getAvailable(order='first',
                                                limit=100,
                                                maxLoad=0.5,
                                                maxMemory=0.5,
                                                includeNan=False,
                                                excludeID=[],
                                                excludeUUID=[])
                # only use the ones in CUDA_VISIBLE_DEVICES
                if len(CUDA_VISIBLE_DEVICES) > 0:
                    deviceIDs = [
                        item for item in deviceIDs
                        if item in CUDA_VISIBLE_DEVICES
                    ]
                    basic.outputlogMessage('on ' + machine_name +
                                           ', available GPUs:' +
                                           str(deviceIDs) +
                                           ', among visible ones:' +
                                           str(CUDA_VISIBLE_DEVICES))
                else:
                    basic.outputlogMessage('on ' + machine_name +
                                           ', available GPUs:' +
                                           str(deviceIDs))

                if len(deviceIDs) < 1:
                    time.sleep(60)  # wait one minute, then check the available GPUs again
                    continue
                # set only the first available visible
                gpuid = deviceIDs[0]
                basic.outputlogMessage(
                    '%d: predict image %s on GPU %d of %s' %
                    (idx, inf_img_list[idx], gpuid, machine_name))
            else:
                gpuid = None
                basic.outputlogMessage('%d: predict image %s on %s' %
                                       (idx, inf_img_list[idx], machine_name))

            # run inference
            img_save_dir = os.path.join(area_save_dir, 'I%d' % idx)
            inf_list_file = os.path.join(area_save_dir, '%d.txt' % idx)

            # if it already exist, then skip
            if os.path.isdir(img_save_dir) and is_file_exist_in_folder(
                    img_save_dir):
                basic.outputlogMessage(
                    'folder of %dth image (%s) already exists; '
                    'it has been predicted or is being predicted' %
                    (idx, inf_img_list[idx]))
                idx += 1
                continue

            with open(inf_list_file, 'w') as inf_obj:
                inf_obj.writelines(inf_img_list[idx] + '\n')

            sub_process = Process(target=predict_one_image_deeplab,
                                  args=(deeplab_inf_script, para_file,
                                        network_setting_ini, img_save_dir,
                                        inf_list_file, gpuid, trained_model))
            sub_process.start()
            sub_tasks.append(sub_process)

            if b_use_multiGPUs is False:
                # wait until previous one finished
                while sub_process.is_alive():
                    time.sleep(5)

            idx += 1

            # wait until predicted image patches exist or exceed 20 minutes
            time0 = time.time()
            elapsed_time = time.time() - time0
            while elapsed_time < 20 * 60:
                elapsed_time = time.time() - time0
                file_exist = is_file_exist_in_folder(img_save_dir)
                if file_exist is True or sub_process.is_alive() is False:
                    break
                else:
                    time.sleep(5)

            if sub_process.exitcode is not None and sub_process.exitcode != 0:
                sys.exit(1)

            # if 'chpc' in machine_name:
            #     time.sleep(60)  # wait 60 second on ITSC services
            # else:
            #     time.sleep(10)

    # check all the tasks already finished
    while b_all_task_finish(sub_tasks) is False:
        basic.outputlogMessage('wait all tasks to finish')
        time.sleep(60)

    end_time = datetime.datetime.now()

    diff_time = end_time - start_time
    out_str = "%s: time cost of total parallel inference on %s: %d seconds" % (
        str(end_time), machine_name, diff_time.seconds)
    basic.outputlogMessage(out_str)
    with open("time_cost.txt", 'a') as t_obj:
        t_obj.writelines(out_str + '\n')
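
The multi-GPU branch above keeps polling GPUtil until a device below the load
and memory thresholds shows up among CUDA_VISIBLE_DEVICES. The wait step on
its own, as a sketch (thresholds copied from the example):

import time
import GPUtil

def wait_for_gpu(visible=None, check_interval=60):
    while True:
        ids = GPUtil.getAvailable(order='first', limit=100,
                                  maxLoad=0.5, maxMemory=0.5)
        if visible:
            ids = [i for i in ids if i in visible]   # respect CUDA_VISIBLE_DEVICES
        if ids:
            return ids[0]            # first free (and visible) GPU id
        time.sleep(check_interval)   # wait, then poll again
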
Example #26
import os, sys

from skimage import io
# import skimage.transform

import numpy as np

HOME = os.path.expanduser('~')

# path of DeeplabforRS
codes_dir2 = HOME + '/codes/PycharmProjects/DeeplabforRS'
sys.path.insert(0, codes_dir2)
import parameters

import basic_src.basic as basic
import basic_src.io_function as io_function

basic.setlogfile('log_data_augmentation.txt')

# will be updated in the main function
num_classes = 0


def remove_unexpected_ids(img_data, img_name):
    '''
    remove unexpected ids after augmentation; it modifies the numpy array in place
    :param img_data: numpy array of image, should be one band
    :param img_name: file name, helps with debugging
    :return: True
    '''

    unique_value = np.unique(img_data)
    if len(unique_value) > num_classes:
Example #27
    #                   action="store", dest="end_date",
    #                   help="the end date for inquiry, with format year-month-day, e.g., 2018-05-23")
    parser.add_option("-e", "--extent_shp",
                      action="store", dest="extent_shp",
                      help="the path for extent, shapefile (EPSG:4326)")

    parser.add_option("-c", "--cloud_cover",
                      action="store", dest="cloud_cover", type=float,default=0.3,
                      help="the could cover threshold, only accept images with cloud cover less than the threshold")
    parser.add_option("-g", "--group_date",
                      action="store_true", dest="group_date",default=False,
                      help="true to group image if their acquisition date is the same")
    parser.add_option("-x", "--save_xlsx_path",
                      action="store", dest="save_xlsx_path",
                      help="save the sence lists to xlsx file, if this is set, save_meta_shapefile will be ignored")
    parser.add_option("-m", "--save_meta_shapefile",
                      action="store", dest="save_meta_shapefile",
                      help="the path for saving meta and extent of downloaded planet images")



    (options, args) = parser.parse_args()
    if len(sys.argv) < 2 or len(args) < 1:
        parser.print_help()
        sys.exit(2)

    basic.setlogfile('get_planet_image_list_%s.log' % str(datetime.date(datetime.now())))

    main(options, args)

Example #28
    basic.outputlogMessage(
        'Finish removing polygons using multi-temporal mapping results')


if __name__ == "__main__":
    usage = "usage: %prog [options] shape_file_dir file_pattern "
    parser = OptionParser(usage=usage, version="1.0 2020-02-23")
    parser.description = 'Introduction: remove polygons (e.g., non-active polygons) based on multi-temporal polygons '

    parser.add_option("-p",
                      "--para",
                      action="store",
                      dest="para_file",
                      help="the parameters file")

    (options, args) = parser.parse_args()
    if len(sys.argv) < 2:
        parser.print_help()
        sys.exit(2)

    # set parameters files
    if options.para_file is None:
        print('error, no parameters file')
        parser.print_help()
        sys.exit(2)
    else:
        parameters.set_saved_parafile_path(options.para_file)

    basic.setlogfile('polygons_remove_nonActiveRTS.log')

    main(options, args)
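
Examples #17, #18, and #28 all close with the same skeleton: parse options,
insist on a parameter file, set the log file, then call main. A stripped-down
sketch of that skeleton ('my_task.log' is a placeholder name):

import sys
from optparse import OptionParser
import parameters
import basic_src.basic as basic

if __name__ == '__main__':
    parser = OptionParser(usage="usage: %prog [options] input")
    parser.add_option("-p", "--para", action="store", dest="para_file",
                      help="the parameters file")
    (options, args) = parser.parse_args()
    if options.para_file is None:    # the parameter file is mandatory
        print('error, no parameters file')
        parser.print_help()
        sys.exit(2)
    parameters.set_saved_parafile_path(options.para_file)
    basic.setlogfile('my_task.log')  # placeholder log name
    # main(options, args)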