Example #1
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('config_path')
    args = parser.parse_args()
    
    # get config
    with open(args.config_path, 'r') as f:
        cfg = json.load(f)
    config = Config(**cfg)
    
    ###################
    # set paths
    path_images_8bit_train = os.path.join(config.path_data_root, config.train_data_refined_dir_ims)
    # path_images_8bit_train = os.path.join(config.path_data_root, config.train_data_refined_dir, 'images')
    print ("gen+folds.py: path_images_8bit_train:", path_images_8bit_train)
    files = os.listdir(path_images_8bit_train)
    print ("files[:10]:", files[:10])
    weight_save_path = os.path.join(config.path_results_root, 'weights', config.save_weights_dir)
    os.makedirs(weight_save_path, exist_ok=True)
    folds_save_path = os.path.join(weight_save_path, config.folds_file_name)
    
    if os.path.exists(folds_save_path):
        print("folds csv already exists:", folds_save_path)
        return
        
    else:
        print ("folds_save_path:", folds_save_path)

        # # set values
        # if config.num_channels == 3:
        #     image_format_path = 'RGB-PanSharpen'
        # else:
        #     image_format_path = 'MUL-PanSharpen'
        # imfile_prefix = image_format_path + '_'
        ###################    

        shuffle(files)
    
        # group files by filename prefix (the text before the first '_')
        s = {k.split('_')[0] for k in files}
        d = {k: [v for v in files if v.split('_')[0] == k] for k in s}
    
        folds = {}
    
        if config.num_folds == 1:
            nfolds = int(np.rint(1. / config.default_val_perc))
        else:
            nfolds = config.num_folds
    
        idx = 0
        for v in d.values():
            for val in v:
                folds[val] = idx % nfolds
                idx += 1
    
        df = pd.Series(folds, name='fold')
        df.to_csv(folds_save_path, header=['fold'], index=True)
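
These snippets are excerpts, so each one assumes module-level imports and a Config object built from the parsed JSON. A minimal sketch of that shared scaffolding, assuming a plain attribute-wrapper Config (the repo's actual Config class may differ):

import argparse
import json
import os
from random import shuffle

import numpy as np
import pandas as pd


class Config:
    """Hypothetical stand-in that exposes each JSON key as an attribute."""
    def __init__(self, **entries):
        self.__dict__.update(entries)

    def _asdict(self):
        return dict(self.__dict__)

# Typical invocation for any of these scripts:
#   python gen_folds.py path/to/config.json
# where the JSON supplies the keys referenced above, e.g. path_data_root,
# path_results_root, train_data_refined_dir_ims, save_weights_dir,
# folds_file_name, num_folds, default_val_perc.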
def execute():
#    # if using argparse
#    parser = argparse.ArgumentParser()
#    parser.add_argument('--folds_save_dir', type=str, default='/raid/local/src/apls/albu_inference_mod/results',
#                            help="path to predicted folds")
#    parser.add_argument('--out_dir', type=str, default='/raid/local/src/apls/albu_inference_mod/results',
#                            help="path to merged predictions")
#    args = parser.parse_args()
#    #out_dir = os.path.join(os.path.dirname(root), 'merged')
#    os.makedirs(args.out_dir, exist_ok=True)  #os.path.join(root, 'merged'), exist_ok=True)
#    
#    t0 = time.time()
#    merge_tiffs(args.folds_save_dir, args.out_dir)
#    t1 = time.time()
#    print ("Time to merge", len(os.listdir(args.folds_save_dir)), "files:", t1-t0, "seconds")
#    
#    # compress original folds
#    output_filename = args.folds_save_dir
#    print ("output_filename:", output_filename)
#    shutil.make_archive(output_filename, 'gztar', args.folds_save_dir) #'zip', res_dir)
#    # remove folds
#    #shutil.rmtree(args.folds_save_dir, ignore_errors=True)
#    
    
    
    # if using config instead of argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('config_path')
    args = parser.parse_args()
    with open(args.config_path, 'r') as f:
        cfg = json.load(f)
        config = Config(**cfg)
        
    folds_dir = os.path.join(config.path_results_root, config.test_results_dir, config.folds_save_dir)
    merge_dir = os.path.join(config.path_results_root, config.test_results_dir, config.merged_dir)
    
    verbose = False
    print ("folds_save_dir used in merge_preds():", folds_dir)

    os.makedirs(merge_dir, exist_ok=True)  #os.path.join(root, 'merged'), exist_ok=True)

    t0 = time.time()
    merge_tiffs(folds_dir, merge_dir, 
            num_classes=config.num_classes, verbose=verbose)
    t1 = time.time()
Example #3
def main():
    
    '''See _arr_slicing_speed.ipynb for better tests'''
    #global #logger1
    
    
    parser = argparse.ArgumentParser()
    parser.add_argument('config_path')
    args = parser.parse_args()

    with open(args.config_path, 'r') as f:
        cfg = json.load(f)
        config = Config(**cfg)

    ##########
    # Variables
    t0 = time.time()
    percentile = 85
    dx, dy = 4, 4   # nearest neighbors patch size
    min_z = 128     # min z value to consider a hit
    #N_plots = 20
    figsize = (12, 12)

    # best colors
    node_color, edge_color = '#cc9900', '#ffbf00'  # gold
    #node_color, edge_color = '#4dff4d', '#00e600'  # green

    default_node_size = 2 #0.15 #4
    plot_width_key, plot_width_mult = 'inferred_speed_mph', 0.085 # 0.08  # variable width
    #width_key, width_mult = 4, 1   # constant width
    # FIXME: heuristic; treat outputs with more than 4 classes as having an aggregate "total" band
    if config.num_classes > 4:
        use_totband = True
    else:
        use_totband = False
        
    save_shapefiles = True
    use_weighted_mean = True
    variable_edge_speed = False
    run_08a_plot_graph_plus_im = False
    verbose = False
    ##########    
 
    
    # input dirs
    res_root_dir = os.path.join(config.path_results_root, config.test_results_dir)
    #path_images = os.path.join(config.path_data_root, config.test_data_refined_dir)
    graph_dir = os.path.join(res_root_dir, config.graph_dir)
    # get mask location, check if we are stitching together large images or not
    merge_dir = os.path.join(config.path_results_root, 
                             config.test_results_dir, 
                             config.merged_dir)
    mask_prefix = ''
    mask_dir = merge_dir
            
    # output dirs
    graph_speed_dir = os.path.join(res_root_dir, config.graph_dir + '_speed')
    os.makedirs(graph_speed_dir, exist_ok=True)

    # speed conversion dataframes (see _speed_data_prep.ipynb)
    speed_conversion_file_contin = os.path.join(config.path_data_root, 
                                                'SN5_roads_train_speed_conversion_contin.csv')
    speed_conversion_file_binned = os.path.join(config.path_data_root, 
                                                'SN5_roads_train_speed_conversion_binned.csv')
    
    # load conversion file
    # get the conversion dictionary between pixel mask values and road speed (mph)
    if config.num_classes > 1:
        conv_df, conv_dict \
            = load_speed_conversion_dict_binned(speed_conversion_file_binned, num_classes=config.num_classes)
    else:
        conv_df, conv_dict \
            = load_speed_conversion_dict_contin(speed_conversion_file_contin)
    #logger1.info("speed conv_dict: " + str(conv_dict))
    print("speed conv_dict: " + str(conv_dict))
    
    # Add travel time to entire dir
    add_travel_time_dir(graph_dir, mask_dir, conv_dict, graph_speed_dir,
                      min_z=min_z, 
                      dx=dx, dy=dy,
                      percentile=percentile,
                      use_totband=use_totband, 
                      use_weighted_mean=use_weighted_mean,
                      variable_edge_speed=variable_edge_speed,
                      mask_prefix=mask_prefix,
                      save_shapefiles=save_shapefiles,
                      verbose=verbose)
    
    t1 = time.time()
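
load_speed_conversion_dict_binned and load_speed_conversion_dict_contin are defined elsewhere in the repo; they return a dataframe and a {mask value -> speed} lookup. A minimal sketch of the binned variant, assuming the conversion CSV maps each mask channel to a representative speed (the column names here are assumptions, not the repo's schema):

import pandas as pd

def load_speed_conversion_dict_binned(csv_path, num_classes=8):
    # Sketch only: build {mask channel -> speed (mph)} from the conversion CSV.
    # num_classes is accepted for parity with the call sites; unused here.
    conv_df = pd.read_csv(csv_path)
    conv_dict = {int(row['channel']): float(row['speed'])
                 for _, row in conv_df.iterrows()}
    return conv_df, conv_dict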
Example #4
def main():
    '''See _arr_slicing_speed.ipynb for better tests'''
    global logger1

    parser = argparse.ArgumentParser()
    parser.add_argument('config_path')
    args = parser.parse_args()

    with open(args.config_path, 'r') as f:
        cfg = json.load(f)
        config = Config(**cfg)

    ##########
    # Variables
    t0 = time.time()
    percentile = 85
    dx, dy = 4, 4  # nearest neighbors patch size
    min_z = 128  # min z value to consider a hit
    N_plots = 20
    figsize = (12, 12)

    # best colors
    node_color, edge_color = '#cc9900', '#ffbf00'  # gold

    default_node_size = 2
    plot_width_key, plot_width_mult = 'inferred_speed_mph', 0.085  # 0.08  # variable width
    #width_key, width_mult = 4, 1   # constant width
    if config.num_classes == 8:
        use_totband = True
    else:
        use_totband = False

    save_shapefiles = True
    use_weighted_mean = True
    variable_edge_speed = False
    run_08a_plot_graph_plus_im = False
    verbose = False
    ##########

    # input dirs
    res_root_dir = os.path.join(config.path_results_root,
                                config.test_results_dir)
    #path_images = os.path.join(config.path_data_root, config.test_data_refined_dir)
    graph_dir = os.path.join(res_root_dir, config.graph_dir)
    # get mask location, check if we are stitching together large images or not
    out_dir_mask_norm = os.path.join(config.path_results_root,
                                     config.test_results_dir,
                                     config.stitched_dir_norm)
    folds_dir = os.path.join(config.path_results_root, config.test_results_dir,
                             config.folds_save_dir)
    merge_dir = os.path.join(config.path_results_root, config.test_results_dir,
                             config.merged_dir)
    mask_prefix = ''
    if os.path.exists(out_dir_mask_norm):
        mask_dir = out_dir_mask_norm
    else:
        if config.num_folds > 1:
            mask_dir = merge_dir
        else:
            mask_dir = folds_dir
            mask_prefix = 'fold0_'

    log_file = os.path.join(res_root_dir, 'skeleton_speed.log')
    console, logger1 = make_logger.make_logger(log_file, logger_name='log')

    # output dirs
    graph_speed_dir = os.path.join(res_root_dir, config.graph_dir + '_speed')
    os.makedirs(graph_speed_dir, exist_ok=True)
    logger1.info("graph_speed_dir: " + graph_speed_dir)

    # speed conversion dataframes (see _speed_data_prep.ipynb)
    speed_conversion_file = config.speed_conversion_file
    # load conversion file
    # get the conversion dictionary between pixel mask values and road speed (mph)
    if config.num_classes > 1:
        conv_df, conv_dict \
            = load_speed_conversion_dict_binned(speed_conversion_file)
    else:
        conv_df, conv_dict \
           = load_speed_conversion_dict_contin(speed_conversion_file)
    logger1.info("speed conv_dict: " + str(conv_dict))

    # Add travel time to entire dir
    add_travel_time_dir(graph_dir,
                        mask_dir,
                        conv_dict,
                        graph_speed_dir,
                        min_z=min_z,
                        dx=dx,
                        dy=dy,
                        percentile=percentile,
                        use_totband=use_totband,
                        use_weighted_mean=use_weighted_mean,
                        variable_edge_speed=variable_edge_speed,
                        mask_prefix=mask_prefix,
                        save_shapefiles=save_shapefiles,
                        verbose=verbose)

    t1 = time.time()
    logger1.info(
        "Time to execute add_travel_time_dir(): {x} seconds".format(x=t1 - t0))

    # plot a few
    if N_plots > 0:

        logger1.info("\nPlot a few...")
        # define output dir
        graph_speed_plots_dir = os.path.join(res_root_dir,
                                             config.graph_dir + '_speed_plots')
        os.makedirs(graph_speed_plots_dir, exist_ok=True)

        # plot graph on image (with width proportional to speed)
        path_images = os.path.join(config.path_data_root,
                                   config.test_data_refined_dir)
        image_list = [z for z in os.listdir(path_images) if z.endswith('tif')]
        if len(image_list) > N_plots:
            image_names = np.random.choice(image_list, N_plots)
        else:
            image_names = sorted(image_list)
        #logger1.info("image_names: " + image_names)

        for i, image_name in enumerate(image_names):
            if i > 1000:
                break

            image_path = os.path.join(path_images, image_name)
            logger1.info("\n\nPlotting: " + image_name + "  " + image_path)
            pkl_path = os.path.join(graph_speed_dir,
                                    image_name.split('.')[0] + '.gpickle')
            logger1.info("   pkl_path: " + pkl_path)
            if not os.path.exists(pkl_path):
                logger1.info("    missing pkl: " + pkl_path)
                continue
            G = nx.read_gpickle(pkl_path)
            #if not os.path.exists(image_path)

            figname = os.path.join(graph_speed_plots_dir, image_name)
            _ = plot_graph_on_im_yuge(G,
                                      image_path,
                                      figsize=figsize,
                                      show_endnodes=True,
                                      default_node_size=default_node_size,
                                      width_key=plot_width_key,
                                      width_mult=plot_width_mult,
                                      node_color=node_color,
                                      edge_color=edge_color,
                                      title=image_name,
                                      figname=figname,
                                      verbose=True,
                                      super_verbose=verbose)

    t2 = time.time()
    logger1.info(
        "Time to execute add_travel_time_dir(): {x} seconds".format(x=t1 - t0))
    logger1.info("Time to make plots: {x} seconds".format(x=t2 - t1))
    logger1.info("Total time: {x} seconds".format(x=t2 - t0))
Example #5
def main():

    global logger1

    min_subgraph_length_pix = 300  # default; overridden from config in the non-local branch below
    min_spur_length_m = 0.001  # default = 5
    local = False  #True
    verbose = False
    super_verbose = False
    make_plots = False  #True
    save_shapefiles = True  #False
    pickle_protocol = 4  # protocol 4 is the most recent; Python 2.7 cannot read it

    # local
    if local:
        albu_path = '/Users/avanetten/Documents/cosmiq/apls/albu_inference_mod'
        path_images = '/Users/avanetten/Documents/cosmiq/spacenet/data/spacenetv2/AOI_2_Vegas_Test/400m/RGB-PanSharpen'
        res_root_dir = os.path.join(
            albu_path, 'results/2m_4fold_512_30e_d0.2_g0.2_AOI_2_Vegas_Test')
        csv_file = os.path.join(res_root_dir, 'wkt_submission.csv')
        graph_dir = os.path.join(res_root_dir, 'graphs')
        log_file = os.path.join(res_root_dir, 'wkt_to_G.log')
        os.makedirs(graph_dir, exist_ok=True)

    # deployed on dev box
    else:
        parser = argparse.ArgumentParser()
        parser.add_argument('config_path')
        args = parser.parse_args()
        with open(args.config_path, 'r') as f:
            cfg = json.load(f)
            config = Config(**cfg)

        # output files
        res_root_dir = os.path.join(config.path_results_root,
                                    config.test_results_dir)
        path_images = os.path.join(config.path_data_root,
                                   config.test_data_refined_dir)
        csv_file = os.path.join(res_root_dir, config.wkt_submission)
        graph_dir = os.path.join(res_root_dir, config.graph_dir)
        log_file = os.path.join(res_root_dir, 'wkt_to_G.log')
        os.makedirs(graph_dir, exist_ok=True)

        min_subgraph_length_pix = config.min_subgraph_length_pix
        min_spur_length_m = config.min_spur_length_m

    console, logger1 = make_logger.make_logger(log_file, logger_name='log')

    # read in wkt list
    logger1.info("df_wkt at: {}".format(csv_file))
    #print ("df_wkt at:", csv_file)
    df_wkt = pd.read_csv(csv_file)
    # columns=['ImageId', 'WKT_Pix'])

    # iterate through image ids and create graphs
    t0 = time.time()
    image_ids = np.sort(np.unique(df_wkt['ImageId']))
    #print("image_ids:", image_ids)
    print("len image_ids:", len(image_ids))

    for i, image_id in enumerate(image_ids):

        #if image_id != 'AOI_2_Vegas_img586':
        #    continue
        out_file = os.path.join(graph_dir, image_id.split('.')[0] + '.gpickle')

        logger1.info("\n{x} / {y}, {z}".format(x=i + 1,
                                               y=len(image_ids),
                                               z=image_id))
        #print ("\n")
        #print (i, "/", len(image_ids), image_id)

        # for geo referencing, im_file should be the raw image
        if config.num_channels == 3:
            im_file = os.path.join(path_images,
                                   'RGB-PanSharpen_' + image_id + '.tif')
        else:
            im_file = os.path.join(path_images,
                                   'MUL-PanSharpen_' + image_id + '.tif')
        #im_file = os.path.join(path_images, image_id)
        if not os.path.exists(im_file):
            im_file = os.path.join(path_images, image_id + '.tif')
        if not os.path.exists(im_file):
            f = [f for f in os.listdir(path_images) if image_id in f][0]
            im_file = os.path.join(path_images, f)
        #print('im_file:', im_file)

        # filter
        df_filt = df_wkt['WKT_Pix'][df_wkt['ImageId'] == image_id]
        wkt_list = df_filt.values
        #wkt_list = [z[1] for z in df_filt_vals]

        # print a few values
        logger1.info("\n{x} / {y}, num linestrings: {z}".format(
            x=i + 1, y=len(image_ids), z=len(wkt_list)))
        #print ("\n", i, "/", len(image_ids), "num linestrings:", len(wkt_list))
        if verbose:
            print("image_file:", im_file)
            print("  wkt_list[:2]", wkt_list[:2])

        if (len(wkt_list) == 0) or (wkt_list[0] == 'LINESTRING EMPTY'):
            G = nx.MultiDiGraph()
            nx.write_gpickle(G, out_file, protocol=pickle_protocol)
            continue

        # create graph
        t1 = time.time()
        G = wkt_to_G(wkt_list,
                     im_file=im_file,
                     min_subgraph_length_pix=min_subgraph_length_pix,
                     min_spur_length_m=min_spur_length_m,
                     verbose=super_verbose)
        t2 = time.time()
        if verbose:
            logger1.info("Time to create graph: {} seconds".format(t2 - t1))
            #print ("Time to create graph:", t2-t1, "seconds")

        if len(G.nodes()) == 0 or len(G.edges()) == 0:
            nx.write_gpickle(G, out_file, protocol=pickle_protocol)
            continue

        # print a node
        node = list(G.nodes())[-1]
        #print (node, "random node props:", G.nodes[node])
        # print an edge
        edge_tmp = list(G.edges())[-1]
        #print (edge_tmp, "random edge props:", G.edges([edge_tmp[0], edge_tmp[1]])) #G.edge[edge_tmp[0]][edge_tmp[1]])
        print(edge_tmp, "random edge props:",
              G.get_edge_data(edge_tmp[0], edge_tmp[1]))

        # save graph
        logger1.info("Saving graph to directory: {}".format(graph_dir))
        #print ("Saving graph to directory:", graph_dir)
        nx.write_gpickle(G, out_file, protocol=pickle_protocol)

        # save shapefile as well?
        if save_shapefiles:
            logger1.info("Saving shapefile to directory: {}".format(graph_dir))
            try:
                ox.save_graph_shapefile(G,
                                        filename=image_id.split('.')[0],
                                        folder=graph_dir,
                                        encoding='utf-8')
            except Exception:
                print("Cannot save shapefile...")
            #out_file2 = os.path.join(graph_dir, image_id.split('.')[0] + '.graphml')
            #ox.save_graphml(G, image_id.split('.')[0] + '.graphml', folder=graph_dir)

        # plot, if desired
        if make_plots:
            print("Plotting graph...")
            outfile_plot = os.path.join(graph_dir, image_id)
            print("outfile_plot:", outfile_plot)
            ox.plot_graph(
                G,
                fig_height=9,
                fig_width=9,
                #save=True, filename=outfile_plot, margin=0.01)
            )
            #plt.tight_layout()
            plt.savefig(outfile_plot, dpi=400)

        #if i > 30:
        #    break

    tf = time.time()
    logger1.info("Time to run wkt_to_G.py: {} seconds".format(tf - t0))


def parse_args():
    # Reconstructed function header: the original listing is truncated here.
    # The arguments mirror what the __main__ block below actually reads
    # (args.config_path and args.out_csv).
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'config_path',
        type=str,
        help='Path to config json',
    )
    parser.add_argument(
        '--out-csv',
        type=str,
        required=True,
        help='Where to save csv',
    )
    parser.set_defaults(add_metrics=False, )
    return parser.parse_args()


###############################################################################
if __name__ == "__main__":
    args = parse_args()

    with open(args.config_path, 'r') as f:
        cfg = json.load(f)
        config = Config(**cfg)

    root_dir = os.path.join(config.path_results_root, config.test_results_dir)

    weight_keys = ['length', 'travel_time_s']
    verbose = False
    pkl_dir = os.path.join(root_dir, 'graphs_speed')
    output_csv_path = args.out_csv

    df = pkl_dir_to_wkt(pkl_dir,
                        output_csv_path=output_csv_path,
                        weight_keys=weight_keys,
                        verbose=verbose)
def execute():

    # if using config instead of argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('config_path')
    args = parser.parse_args()
    with open(args.config_path, 'r') as f:
        cfg = json.load(f)
        config = Config(**cfg)

    # nothing to do if only one fold
    if config.num_folds == 1:
        print("num_folds = 1, no need to merge")
        return

    folds_dir = os.path.join(config.path_results_root, config.test_results_dir,
                             config.folds_save_dir)
    merge_dir = os.path.join(config.path_results_root, config.test_results_dir,
                             config.merged_dir)

    # make gdal folder?
    merge_dir_gdal = merge_dir + '_gdal'
    #merge_dir_gdal = None

    verbose = False
    #res_dir = config.folds_save_dir
    #res_dir = os.path.join(config.results_dir, config.folder + config.out_suff + '/folds')
    print("folds_save_dir used in merge_preds():", folds_dir)

    out_dir = merge_dir
    os.makedirs(out_dir,
                exist_ok=True)  #os.path.join(root, 'merged'), exist_ok=True)
    # set output dir to: os.path.join(config.results_dir, config.folder + config.out_suff, 'merged')
    print("out_dir used in merge_preds():", out_dir)

    if merge_dir_gdal:
        os.makedirs(merge_dir_gdal, exist_ok=True)

    t0 = time.time()
    merge_tiffs(folds_dir,
                out_dir,
                num_classes=config.num_classes,
                out_dir_gdal=merge_dir_gdal,
                verbose=verbose)
    t1 = time.time()
    print("Time to merge", len(os.listdir(folds_dir)), "files:", t1 - t0,
          "seconds")

    #root = '/results/results'
    #merge_tiffs(os.path.join(root, '2m_4fold_512_30e_d0.2_g0.2_test'))

    print("Compress original folds...")
    output_filename = folds_dir
    #output_filename = os.path.join(config.results_dir, config.folder + config.out_suff + '/folds')
    print("output_filename:", output_filename)
    shutil.make_archive(output_filename, 'gztar', folds_dir)  #'zip', res_dir)
    # remove folds
    shutil.rmtree(folds_dir, ignore_errors=True)

    print("Compress original gdal folds...")
    output_filename = folds_dir + '_gdal'
    if os.path.exists(output_filename):
        #output_filename = os.path.join(config.results_dir, config.folder + config.out_suff + '/folds')
        print("output_filename:", output_filename)
        shutil.make_archive(output_filename, 'gztar',
                            folds_dir + '_gdal')  #'zip', res_dir)
        # remove folds
        shutil.rmtree(folds_dir + '_gdal', ignore_errors=True)
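
merge_tiffs is defined elsewhere; given the 'fold0_' naming used throughout these examples, its presumed job is to average the per-fold prediction masks into one mask per image. A simplified sketch (the real function also handles multi-class masks and the optional gdal output dir):

import os
import numpy as np
import skimage.io

def merge_tiffs(folds_dir, out_dir, num_classes=1, out_dir_gdal=None,
                verbose=False):
    # Sketch: average 'fold0_<name>.tif', 'fold1_<name>.tif', ... -> '<name>.tif'.
    # num_classes and out_dir_gdal are accepted for API parity; ignored here.
    for f0 in sorted(os.listdir(folds_dir)):
        if not f0.startswith('fold0_'):
            continue
        name = f0.split('fold0_')[-1]
        fold_files = sorted(f for f in os.listdir(folds_dir) if f.endswith(name))
        arrs = [skimage.io.imread(os.path.join(folds_dir, f)).astype(np.float32)
                for f in fold_files]
        merged = np.mean(arrs, axis=0).astype(np.uint8)
        skimage.io.imsave(os.path.join(out_dir, name), merged)
        if verbose:
            print("merged", len(arrs), "folds ->", name)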
Example #8
def main():

    global logger1
    spacenet_naming_convention = False # True
    
    parser = argparse.ArgumentParser()
    parser.add_argument('config_path')
    args = parser.parse_args()

    with open(args.config_path, 'r') as f:
        cfg = json.load(f)
        config = Config(**cfg)

    im_prefix = ''
    # check if we are stitching together large images or not
    out_dir_mask_norm = os.path.join(config.path_results_root, 
                                     config.test_results_dir, 
                                     config.stitched_dir_norm)
    folds_dir = os.path.join(config.path_results_root, 
                             config.test_results_dir, 
                             config.folds_save_dir)
    merge_dir = os.path.join(config.path_results_root, 
                             config.test_results_dir, 
                             config.merged_dir)

    if os.path.exists(out_dir_mask_norm):
        im_dir = out_dir_mask_norm
    else:
        if config.num_folds > 1:
            im_dir = merge_dir
        else:
            im_dir = folds_dir
            im_prefix = 'fold0_'
            
    os.makedirs(im_dir, exist_ok=True)
  
    # output files
    res_root_dir = os.path.join(config.path_results_root, 
                                config.test_results_dir)
    outfile_csv = os.path.join(res_root_dir, config.wkt_submission)
    #outfile_gpickle = os.path.join(res_root_dir, 'G_sknw.gpickle')
    out_ske_dir = os.path.join(res_root_dir, config.skeleton_dir)  # set to '' to not save
    os.makedirs(out_ske_dir, exist_ok=True)
    if len(config.skeleton_pkl_dir) > 0:
        out_gdir = os.path.join(res_root_dir, config.skeleton_pkl_dir)  # set to '' to not save
        os.makedirs(out_gdir, exist_ok=True)
    else:
        out_gdir = ''
         
    print ("im_dir:", im_dir)
    print ("out_ske_dir:", out_ske_dir)
    print ("out_gdir:", out_gdir)
        
    thresh = config.skeleton_thresh
    min_subgraph_length_pix = config.min_subgraph_length_pix
    
    debug = False
    add_small = True
    fix_borders = True
    img_shape = ()  # (1300, 1300)
    skel_replicate = 5
    skel_clip = 2
    img_mult = 255
    hole_size = 300
    cv2_kernel_close = 7
    cv2_kernel_open = 7
    #max_out_size = (16000, 8000)  # works fine
    #max_out_size = (8003, 16009)  # works fine
    max_out_size = (2000000, 2000000)
 
    log_file = os.path.join(res_root_dir, 'skeleton.log')
    console, logger1 = make_logger.make_logger(log_file, logger_name='log')
   
    
    print ("Building wkts...")
    t0 = time.time()
    df = build_wkt_dir(im_dir, outfile_csv, out_ske_dir, out_gdir, thresh, #threshes={'2': .3, '3': .3, '4': .3, '5': .2}, 
                debug=debug, add_small=add_small, fix_borders=fix_borders,
                img_shape=img_shape,
                skel_replicate=skel_replicate, skel_clip=skel_clip,
                img_mult=img_mult, hole_size=hole_size,
                min_subgraph_length_pix=min_subgraph_length_pix,
                cv2_kernel_close=cv2_kernel_close, cv2_kernel_open=cv2_kernel_open,
                max_out_size=max_out_size,
                skeleton_band=config.skeleton_band,
                num_classes=config.num_classes,
                im_prefix=im_prefix,
                spacenet_naming_convention=spacenet_naming_convention)        

    print ("len df:", len(df))
    print ("outfile:", outfile_csv)
    t1 = time.time()
    logger1.info("Total time to run build_wkt_dir: {} seconds".format(t1-t0))
Example #9
def update_config(config, **kwargs):
    print("Run utils.update_config()...")
    d = config._asdict()
    d.update(**kwargs)
    print("Updated config:", d)
    return Config(**d)
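
Because update_config returns a fresh Config instead of mutating its input, single fields can be overridden after the JSON is loaded. A quick usage sketch with toy values, using the attribute-wrapper Config sketched near the top of this listing:

cfg = {'num_folds': 4, 'default_val_perc': 0.2}  # toy values
config = Config(**cfg)
config = update_config(config, num_folds=1)
print(config.num_folds)  # -> 1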
def main():

    global logger1
    spacenet_naming_convention = True #False 
    
    parser = argparse.ArgumentParser()
    parser.add_argument('config_path')
    args = parser.parse_args()

    with open(args.config_path, 'r') as f:
        cfg = json.load(f)
        config = Config(**cfg)

    im_prefix = ''
    # check if we are stitching together large images or not
    out_dir_mask_norm = os.path.join(config.path_results_root, 
                                     config.test_results_dir, 
                                     config.stitched_dir_norm)
    folds_dir = os.path.join(config.path_results_root, 
                             config.test_results_dir, 
                             config.folds_save_dir)
    merge_dir = os.path.join(config.path_results_root, 
                             config.test_results_dir, 
                             config.merged_dir)

    if os.path.exists(out_dir_mask_norm):
        im_dir = out_dir_mask_norm
    else:
        if config.num_folds > 1:
            im_dir = merge_dir
        else:
            im_dir = folds_dir
            im_prefix = 'fold0_'
            
    os.makedirs(im_dir, exist_ok=True)
  
    # output files
    res_root_dir = os.path.join(config.path_results_root, 
                                config.test_results_dir)
    outfile_csv = os.path.join(res_root_dir, config.wkt_submission)
    #outfile_gpickle = os.path.join(res_root_dir, 'G_sknw.gpickle')
    out_ske_dir = os.path.join(res_root_dir, config.skeleton_dir)  # set to '' to not save
    os.makedirs(out_ske_dir, exist_ok=True)
    if len(config.skeleton_pkl_dir) > 0:
        out_gdir = os.path.join(res_root_dir, config.skeleton_pkl_dir)  # set to '' to not save
        os.makedirs(out_gdir, exist_ok=True)
    else:
        out_gdir = ''
     
#    # use stitched dir if it has been populated
#    if os.path.exists(config.stitched_dir_norm):
#        im_dir = config.stitched_dir_norm
#    else:
#        im_dir = config.merged_dir
#        # merged_dir = os.path.join(config.results_dir, config.folder + config.out_suff, 'merged')
#     
#    # output files
#    res_root_dir = config.results_dir #os.path.dirname(im_dir)
#    outfile_csv = os.path.join(res_root_dir, 'merged_wkt_list.csv')
#    #outfile_gpickle = os.path.join(res_root_dir, 'G_sknw.gpickle')
#    out_ske_dir = os.path.join(res_root_dir, 'skeleton')  # set to '' to not save
#    os.makedirs(out_ske_dir, exist_ok=True)
#    out_gdir = os.path.join(res_root_dir, 'sknw_gpickle')  # set to '' to not save
#    os.makedirs(out_gdir, exist_ok=True)
    
    #print ("im_dir:", im_dir)
    #print ("out_ske_dir:", out_ske_dir)
    #print ("out_gdir:", out_gdir)
        
    thresh = config.skeleton_thresh
#    # thresholds for each aoi
#    threshes={'2': .3, '3': .3, '4': .3, '5': .2}  
#    thresh = threshes[config.aoi]
    min_subgraph_length_pix = config.min_subgraph_length_pix
    #min_subgraph_length_pix=200
    
    debug = False
    add_small = True
    fix_borders = True
    img_shape = ()  # (1300, 1300)
    skel_replicate = 5
    skel_clip = 2
    img_mult = 255
    hole_size = 300
    cv2_kernel_close = 7
    cv2_kernel_open = 7
    #max_out_size = (16000, 8000)  # works fine
    #max_out_size = (8003, 16009)  # works fine
    max_out_size = (2000000, 2000000)
 
    log_file = os.path.join(res_root_dir, 'skeleton.log')
    console, logger1 = make_logger.make_logger(log_file, logger_name='log')
   
    
    #print ("Building wkts...")
    t0 = time.time()
    df = build_wkt_dir(im_dir, outfile_csv, out_ske_dir, out_gdir, thresh, #threshes={'2': .3, '3': .3, '4': .3, '5': .2}, 
                debug=debug, add_small=add_small, fix_borders=fix_borders,
                img_shape=img_shape,
                skel_replicate=skel_replicate, skel_clip=skel_clip,
                img_mult=img_mult, hole_size=hole_size,
                min_subgraph_length_pix=min_subgraph_length_pix,
                cv2_kernel_close=cv2_kernel_close, cv2_kernel_open=cv2_kernel_open,
                max_out_size=max_out_size,
                skeleton_band=config.skeleton_band,
                num_classes=config.num_classes,
                im_prefix=im_prefix,
                spacenet_naming_convention=spacenet_naming_convention)        

    #print ("len df:", len(df))
    #print ("outfile:", outfile_csv)
    t1 = time.time()
    logger1.info("Total time to run build_wkt_dir: {} seconds".format(t1-t0))
Example #11
def main():

    # if using config instead of argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('config_path')
    args = parser.parse_args()
    with open(args.config_path, 'r') as f:
        cfg = json.load(f)
        config = Config(**cfg)

    print("Running stitch.py...")

    save_overlay_and_raw = False  # switch to save the stitching overlay and
    # non-normalized image

    # compression 0 to 9 (most compressed)
    compression_params = [cv2.IMWRITE_PNG_COMPRESSION, 5]

    folds_dir = os.path.join(config.path_results_root, config.test_results_dir,
                             config.folds_save_dir)
    merge_dir = os.path.join(config.path_results_root, config.test_results_dir,
                             config.merged_dir)

    if config.num_folds > 1:
        im_dir = merge_dir
        im_prefix = ''
    else:
        im_dir = folds_dir
        im_prefix = 'fold0_'

    # output dirs
    out_dir_mask_raw = os.path.join(config.path_results_root,
                                    config.test_results_dir,
                                    config.stitched_dir_raw)
    out_dir_count = os.path.join(config.path_results_root,
                                 config.test_results_dir,
                                 config.stitched_dir_count)
    out_dir_mask_norm = os.path.join(config.path_results_root,
                                     config.test_results_dir,
                                     config.stitched_dir_norm)

    # assume tile csv is in data dir, not root dir
    path_tile_df_csv = os.path.join(config.path_data_root,
                                    os.path.dirname(config.test_sliced_dir),
                                    config.tile_df_csv)
    # try tile_df_csv in results path
    #path_tile_df_csv = os.path.join(config.path_results_root, config.test_results_dir, config.tile_df_csv)

    #out_dir_mask_norm = config.stitched_dir_norm #os.path.join(config.stitched_dir ,'mask_norm')
    #out_dir_mask_raw = config.stitched_dir_raw #os.path.join(config.stitched_dir, 'mask_raw')
    #out_dir_count = config.stitched_dir_count #os.path.join(config.stitched_dir, 'mask_count')
    #res_root_dir = os.path.dirname(config.merged_dir)
    ##out_dir_root = os.path.join(res_root_dir, 'stitched')
    #out_dir_mask_norm = os.path.join(res_root_dir, 'stitched/mask_norm')
    #out_dir_mask_raw = os.path.join(res_root_dir, 'stitched/mask_raw')
    #out_dir_count = os.path.join(res_root_dir, 'stitched/mask_count')

    # make dirs
    os.makedirs(out_dir_mask_norm, exist_ok=True)
    os.makedirs(out_dir_mask_raw, exist_ok=True)
    os.makedirs(out_dir_count, exist_ok=True)

    res_root_dir = os.path.join(config.path_results_root,
                                config.test_results_dir)
    log_file = os.path.join(res_root_dir, 'stitch.log')
    console, logger1 = make_logger.make_logger(log_file, logger_name='log')

    # read in df_pos
    #df_file = os.path.join(out_dir_root, 'tile_df.csv')
    df_pos_tot = pd.read_csv(path_tile_df_csv)
    logger1.info("len df_pos_tot: {x}".format(x=len(df_pos_tot)))
    #print ("len df_pos_tot:", len(df_pos_tot))
    t0 = time.time()
    ttot = 0

    # save for each individual image
    idxs = np.sort(np.unique(df_pos_tot['idx']))
    logger1.info("image idxs: {x}".format(x=idxs))
    #print ("image idxs:", idxs)
    for idx in idxs:
        logger1.info("\n")
        logger1.info("idx: {x} / {y}".format(x=idx + 1, y=len(idxs)))
        #print ("\nidx:", idx, "/", len(idxs))
        # filter by idx
        df_pos = df_pos_tot.loc[df_pos_tot['idx'] == idx]
        logger1.info("len df_pos: {x}".format(x=len(df_pos)))
        #print ("len df_pos:", len(df_pos))
        # execute
        t1 = time.time()
        name, mask_norm, mask_raw, overlay_count = \
                post_process_image(df_pos, im_dir,
                                   im_prefix=im_prefix,
                                   num_classes=config.num_classes,
                                   super_verbose=False)
        t2 = time.time()
        ttot += t2 - t1
        logger1.info("Time to run stitch for idx: {x} = {y} seconds".format(
            x=idx, y=t2 - t1))
        #print ("Time to run stitch for idx:", idx, "=", t2 - t1, "seconds")
        logger1.info("mask_norm.shape: {x}".format(x=mask_norm.shape))
        print("mask_norm.dtype:", mask_norm.dtype)
        print("mask_raw.dtype:", mask_raw.dtype)
        print("overlay_count.dtype:", overlay_count.dtype)
        print("np.max(overlay_count):", np.max(overlay_count))
        print("np.min(overlay_count):", np.min(overlay_count))

        # write to files (cv2 can't read enormous files, but it can write large ones)
        print("Saving to files...")
        # remove prefix, if required
        if len(im_prefix) > 0:
            out_file_root = name.split(im_prefix)[-1] + '.tif'
        else:
            out_file_root = name + '.tif'

        logger1.info("out_file_root {x}:".format(x=out_file_root))
        #print ("out_file_root:", out_file_root)
        out_file_mask_norm = os.path.join(out_dir_mask_norm, out_file_root)
        out_file_mask_raw = os.path.join(out_dir_mask_raw, out_file_root)
        out_file_count = os.path.join(out_dir_count, out_file_root)

        if config.num_classes == 1:
            cv2.imwrite(out_file_mask_norm, mask_norm.astype(np.uint8),
                        compression_params)
            del mask_norm
            if save_overlay_and_raw:
                cv2.imwrite(out_file_mask_raw, mask_raw.astype(np.uint8),
                            compression_params)
            del mask_raw
        else:
            mask_norm = np.moveaxis(mask_norm, -1, 0).astype(np.uint8)
            skimage.io.imsave(out_file_mask_norm, mask_norm, compress=1)
            del mask_norm
            if save_overlay_and_raw:
                mask_raw = np.moveaxis(mask_raw, -1, 0).astype(np.uint8)
                skimage.io.imsave(out_file_mask_raw, mask_raw, compress=1)
            del mask_raw

        if save_overlay_and_raw:
            cv2.imwrite(out_file_count, overlay_count, compression_params)
        #cv2.imwrite(out_file_count, overlay_count.astype(np.uint8), compression_params)
        del overlay_count
        #skimage.io.imsave(out_file_mask_norm, mask_norm)
        #skimage.io.imsave(out_file_mask_raw, mask_raw)
        #skimage.io.imsave(out_file_count, overlay_count)

    t3 = time.time()
    logger1.info(
        "Time to run stitch.py and create large masks: {} seconds".format(
            ttot))
    logger1.info(
        "Time to run stitch.py and create large masks (and save): {} seconds".
        format(t3 - t0))
    #print ("Time to run stitch.py and create large masks:", ttot, "seconds")
    #print ("Time to run stitch.py and create large masks (and save):", t3 - t0, "seconds")

    return
def main():

    default_crs = {'init': 'epsg:4326'}

    # Vegas settings
    local = False
    fig_height = 12
    fig_width = 12
    node_color = '#ffdd1a'
    edge_color = '#ffdd1a'
    node_size = 0.2
    node_alpha = 0.7
    edge_linewidth = 0.3
    edge_alpha = 0.8
    orig_dest_node_size = 4.5 * node_size
    save_only_route_png = False  # True

    route_color = 'r'
    orig_dest_node_color = 'r'
    route_linewidth = 4 * edge_linewidth

    # assumed module-level settings in the original script (not shown here)
    shuffle = True   # shuffle the plotting order of images
    max_plots = 20   # cap on how many images to plot

    # local
    if local:
        pass

    else:
        parser = argparse.ArgumentParser()
        parser.add_argument('config_path')
        args = parser.parse_args()

        with open(args.config_path, 'r') as f:
            cfg = json.load(f)
            config = Config(**cfg)

        # output files
        res_root_dir = os.path.join(config.path_results_root,
                                    config.test_results_dir)
        path_images_8bit = os.path.join(config.path_data_root,
                                        config.test_data_refined_dir)
        graph_dir = os.path.join(res_root_dir, config.graph_dir)
        out_dir = graph_dir.strip() + '_plots'
        os.makedirs(out_dir, exist_ok=True)
        #res_root_dir = config.results_dir #os.path.dirname(im_dir)
        #path_images_8bit = config.path_images_8bit
        #graph_dir = os.path.join(res_root_dir, 'graphs')

    # iterate through images and graphs, plot routes
    im_list = sorted(
        [z for z in os.listdir(path_images_8bit) if z.endswith('.tif')])

    if shuffle:
        random.shuffle(im_list)

    for i, im_root in enumerate(
            im_list):  #enumerate(os.listdir(path_images_8bit)):
        if not im_root.endswith('.tif'):
            continue

        if i >= max_plots:
            break

        im_root_no_ext = im_root.split('.tif')[0]
        im_file = os.path.join(path_images_8bit, im_root)
        graph_pkl = os.path.join(graph_dir, im_root_no_ext + '.gpickle')
        print("\n\n", i, "im_root:", im_root)
        print("  im_file:", im_file)
        print("  graph_pkl:", graph_pkl)

        # gpickle?
        print("Reading gpickle...")
        G = nx.read_gpickle(graph_pkl)

        # make sure geometries are not just strings
        print("Make sure geometries are not just strings...")
        for u, v, key, data in G.edges(keys=True, data=True):
            for attr_key in data:
                # geometries may have been serialized to WKT strings on save
                if attr_key in ('geometry', 'geometry_pix') \
                        and isinstance(data[attr_key], str):
                    data[attr_key] = wkt.loads(data[attr_key])

        # print a node
        node = list(G.nodes())[-1]
        print(node, "random node props:", G.nodes[node])
        # print an edge
        edge_tmp = list(G.edges())[-1]
        print(edge_tmp, "random edge props:",
              G.get_edge_data(edge_tmp[0], edge_tmp[1]))

        #node = G.nodes()[-1]
        #print ("node:", node, "props:", G.node[node])
        #u,v = G.edges()[-1]
        #print ("edge:", u,v, "props:", G.edge[u][v])

        # read in image, cv2 fails on large files
        print("Read in image...")
        try:
            #convert to rgb (cv2 reads in bgr)
            img_cv2 = cv2.imread(im_file, 1)
            print("img_cv2.shape:", img_cv2.shape)
            im = cv2.cvtColor(img_cv2, cv2.COLOR_BGR2RGB)
        except Exception:
            # as_gray in modern scikit-image (older releases used as_grey)
            im = skimage.io.imread(im_file,
                                   as_gray=False).astype(np.uint8)  # [::-1]

        # set dpi to approximate native resolution
        print("im.shape:", im.shape)
        desired_dpi = int(np.max(im.shape) / np.max([fig_height, fig_width]))
        print("desired dpi:", desired_dpi)
        # max out dpi at 3500
        dpi = int(np.min([3500, desired_dpi]))
        print("plot dpi:", dpi)

        # plot graph with image background
        if not save_only_route_png:
            out_file_plot = os.path.join(out_dir,
                                         im_root_no_ext + '_ox_plot.tif')
            print("outfile_plot:", out_file_plot)
            plot_graph_pix(G,
                           im,
                           fig_height=fig_height,
                           fig_width=fig_width,
                           node_size=node_size,
                           node_alpha=node_alpha,
                           node_color=node_color,
                           edge_linewidth=edge_linewidth,
                           edge_alpha=edge_alpha,
                           edge_color=edge_color,
                           filename=out_file_plot,
                           default_dpi=dpi,
                           show=False,
                           save=True)
Example #13
def main():
    global logger1

    # min_subgraph_length_pix = 300
    min_spur_length_m = 0.001  # default = 5
    local = False  # True
    verbose = True
    super_verbose = False
    make_plots = False  # True
    save_shapefiles = True  # False
    pickle_protocol = 4  # protocol 4 is the most recent; Python 2.7 cannot read it

    # local
    if local:
        pass

    else:
        parser = argparse.ArgumentParser()
        parser.add_argument('config_path')
        args = parser.parse_args()
        with open(args.config_path, 'r') as f:
            cfg = json.load(f)
            config = Config(**cfg)

        # output files
        res_root_dir = os.path.join(config.path_results_root, config.test_results_dir)
        path_images = os.path.join(config.path_data_root, config.test_data_refined_dir)
        csv_file = os.path.join(res_root_dir, config.wkt_submission)
        graph_dir = os.path.join(res_root_dir, config.graph_dir)
        log_file = os.path.join(res_root_dir, 'wkt_to_G.log')
        os.makedirs(graph_dir, exist_ok=True)

        min_subgraph_length_pix = config.min_subgraph_length_pix
        min_spur_length_m = config.min_spur_length_m

    console, logger1 = make_logger.make_logger(log_file, logger_name='log')

    # read in wkt list
    logger1.info("df_wkt at: {}".format(csv_file))
    # print ("df_wkt at:", csv_file)
    df_wkt = pd.read_csv(csv_file)
    # columns=['ImageId', 'WKT_Pix'])

    # iterate through image ids and create graphs
    t0 = time.time()
    image_ids = np.sort(np.unique(df_wkt['ImageId']))
    print("image_ids:", image_ids)
    print("len image_ids:", len(image_ids))

    imfiles_args = [[image_id, config, graph_dir, path_images, df_wkt, pickle_protocol, min_spur_length_m,
                     min_subgraph_length_pix, super_verbose, verbose, save_shapefiles, make_plots] for image_id in
                    image_ids]

    with multiprocessing.Pool(16) as pool:
        list(pool.starmap(process_img, imfiles_args))
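
process_img is defined elsewhere; the starmap call above fixes its argument order. A skeleton consistent with the serial per-image loop of Example #5 (a sketch, not the repo's actual worker):

def process_img(image_id, config, graph_dir, path_images, df_wkt,
                pickle_protocol, min_spur_length_m, min_subgraph_length_pix,
                super_verbose, verbose, save_shapefiles, make_plots):
    # Sketch: per-image version of the serial loop shown in Example #5.
    out_file = os.path.join(graph_dir, image_id.split('.')[0] + '.gpickle')
    im_file = os.path.join(path_images, image_id + '.tif')
    wkt_list = df_wkt['WKT_Pix'][df_wkt['ImageId'] == image_id].values
    if (len(wkt_list) == 0) or (wkt_list[0] == 'LINESTRING EMPTY'):
        nx.write_gpickle(nx.MultiDiGraph(), out_file, protocol=pickle_protocol)
        return
    G = wkt_to_G(wkt_list, im_file=im_file,
                 min_subgraph_length_pix=min_subgraph_length_pix,
                 min_spur_length_m=min_spur_length_m,
                 verbose=super_verbose)
    nx.write_gpickle(G, out_file, protocol=pickle_protocol)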


    tf = time.time()
    logger1.info("Time to run wkt_to_G.py: {} seconds".format(tf - t0))
Example #14
def main():

    # if using config instead of argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('config_path')
    args = parser.parse_args()
    with open(args.config_path, 'r') as f:
        cfg = json.load(f)
        config = Config(**cfg)

    print("Running stitch.py...")

    save_overlay_and_raw = False  # switch to save the stitching overlay and
    # non-normalized image

    # compression 0 to 9 (most compressed)
    compression_params = [cv2.IMWRITE_PNG_COMPRESSION, 5]

    folds_dir = os.path.join(config.path_results_root, config.test_results_dir,
                             config.folds_save_dir)
    merge_dir = os.path.join(config.path_results_root, config.test_results_dir,
                             config.merged_dir)

    if config.num_folds > 1:
        im_dir = merge_dir
        im_prefix = ''
    else:
        im_dir = folds_dir
        im_prefix = 'fold0_'

    # output dirs
    out_dir_mask_raw = os.path.join(config.path_results_root,
                                    config.test_results_dir,
                                    config.stitched_dir_raw)
    out_dir_count = os.path.join(config.path_results_root,
                                 config.test_results_dir,
                                 config.stitched_dir_count)
    out_dir_mask_norm = os.path.join(config.path_results_root,
                                     config.test_results_dir,
                                     config.stitched_dir_norm)

    # assume tile csv is in data dir, not root dir
    path_tile_df_csv = os.path.join(config.path_data_root,
                                    os.path.dirname(config.test_sliced_dir),
                                    config.tile_df_csv)

    # make dirs
    os.makedirs(out_dir_mask_norm, exist_ok=True)
    os.makedirs(out_dir_mask_raw, exist_ok=True)
    os.makedirs(out_dir_count, exist_ok=True)

    res_root_dir = os.path.join(config.path_results_root,
                                config.test_results_dir)
    log_file = os.path.join(res_root_dir, 'stitch.log')
    console, logger1 = make_logger.make_logger(log_file, logger_name='log')

    # read in df_pos
    #df_file = os.path.join(out_dir_root, 'tile_df.csv')
    df_pos_tot = pd.read_csv(path_tile_df_csv)
    logger1.info("len df_pos_tot: {x}".format(x=len(df_pos_tot)))
    #print ("len df_pos_tot:", len(df_pos_tot))
    t0 = time.time()
    ttot = 0

    # save for each individual image
    idxs = np.sort(np.unique(df_pos_tot['idx']))
    logger1.info("image idxs: {x}".format(x=idxs))
    #print ("image idxs:", idxs)
    for idx in idxs:
        logger1.info("\n")
        logger1.info("idx: {x} / {y}".format(x=idx + 1, y=len(idxs)))
        #print ("\nidx:", idx, "/", len(idxs))
        # filter by idx
        df_pos = df_pos_tot.loc[df_pos_tot['idx'] == idx]
        logger1.info("len df_pos: {x}".format(x=len(df_pos)))
        #print ("len df_pos:", len(df_pos))
        # execute
        t1 = time.time()
        name, mask_norm, mask_raw, overlay_count = \
                post_process_image(df_pos, im_dir,
                                   im_prefix=im_prefix,
                                   num_classes=config.num_classes,
                                   super_verbose=False)
        t2 = time.time()
        ttot += t2 - t1
        logger1.info("Time to run stitch for idx: {x} = {y} seconds".format(
            x=idx, y=t2 - t1))
        #print ("Time to run stitch for idx:", idx, "=", t2 - t1, "seconds")
        logger1.info("mask_norm.shape: {x}".format(x=mask_norm.shape))
        print("mask_norm.dtype:", mask_norm.dtype)
        print("mask_raw.dtype:", mask_raw.dtype)
        print("overlay_count.dtype:", overlay_count.dtype)
        print("np.max(overlay_count):", np.max(overlay_count))
        print("np.min(overlay_count):", np.min(overlay_count))

        # write to files (cv2 can't read enormous files, but it can write large ones)
        print("Saving to files...")
        # remove prefix, if required
        if len(im_prefix) > 0:
            out_file_root = name.split(im_prefix)[-1] + '.tif'
        else:
            out_file_root = name + '.tif'

        logger1.info("out_file_root {x}:".format(x=out_file_root))
        #print ("out_file_root:", out_file_root)
        out_file_mask_norm = os.path.join(out_dir_mask_norm, out_file_root)
        out_file_mask_raw = os.path.join(out_dir_mask_raw, out_file_root)
        out_file_count = os.path.join(out_dir_count, out_file_root)

        if config.num_classes == 1:
            cv2.imwrite(out_file_mask_norm, mask_norm.astype(np.uint8),
                        compression_params)
            del mask_norm
            if save_overlay_and_raw:
                cv2.imwrite(out_file_mask_raw, mask_raw.astype(np.uint8),
                            compression_params)
            del mask_raw
        else:
            mask_norm = np.moveaxis(mask_norm, -1, 0).astype(np.uint8)
            skimage.io.imsave(out_file_mask_norm, mask_norm, compress=1)
            del mask_norm
            if save_overlay_and_raw:
                mask_raw = np.moveaxis(mask_raw, -1, 0).astype(np.uint8)
                skimage.io.imsave(out_file_mask_raw, mask_raw, compress=1)
            del mask_raw

        if save_overlay_and_raw:
            cv2.imwrite(out_file_count, overlay_count, compression_params)
        #cv2.imwrite(out_file_count, overlay_count.astype(np.uint8), compression_params)
        del overlay_count
        #skimage.io.imsave(out_file_mask_norm, mask_norm)
        #skimage.io.imsave(out_file_mask_raw, mask_raw)
        #skimage.io.imsave(out_file_count, overlay_count)

    t3 = time.time()
    logger1.info(
        "Time to run stitch.py and create large masks: {} seconds".format(
            ttot))
    logger1.info(
        "Time to run stitch.py and create large masks (and save): {} seconds".
        format(t3 - t0))
    #print ("Time to run stitch.py and create large masks:", ttot, "seconds")
    #print ("Time to run stitch.py and create large masks (and save):", t3 - t0, "seconds")

    return