Example #1
def main():
    
    global logger1 
    
    # min_subgraph_length_pix = 300
    min_spur_length_m = 0.001  # default = 5
    local = False  # True
    verbose = True
    super_verbose = False
    make_plots = False  # True
    save_shapefiles = True  # False
    pickle_protocol = 4     # 4 is most recent, python 2.7 can't read 4
    
    # local
    if local:
        albu_path = '/Users/avanetten/Documents/cosmiq/apls/albu_inference_mod'
        path_images = '/Users/avanetten/Documents/cosmiq/spacenet/data/spacenetv2/AOI_2_Vegas_Test/400m/RGB-PanSharpen'
        res_root_dir = os.path.join(albu_path, 'results/2m_4fold_512_30e_d0.2_g0.2_AOI_2_Vegas_Test')
        csv_file = os.path.join(res_root_dir, 'wkt_submission.csv')
        graph_dir = os.path.join(res_root_dir, 'graphs')
        log_file = os.path.join(res_root_dir, 'wkt_to_G.log')
        min_subgraph_length_pix = 300  # local default (per the comment above)
        os.makedirs(graph_dir, exist_ok=True)
    
    # deployed on dev box
    else:
        parser = argparse.ArgumentParser()
        parser.add_argument('config_path')
        args = parser.parse_args()
        with open(args.config_path, 'r') as f:
            cfg = json.load(f)
            config = Config(**cfg)
            
        # output files
        res_root_dir = os.path.join(config.path_results_root, config.test_results_dir)
        path_images = os.path.join(config.path_data_root, config.test_data_refined_dir)
        csv_file = os.path.join(res_root_dir, config.wkt_submission)
        graph_dir = os.path.join(res_root_dir, config.graph_dir)
        log_file = os.path.join(res_root_dir, 'wkt_to_G.log')
        os.makedirs(graph_dir, exist_ok=True)

        min_subgraph_length_pix = config.min_subgraph_length_pix
        min_spur_length_m = config.min_spur_length_m

    console, logger1 = make_logger.make_logger(log_file, logger_name='log')
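    # make_logger writes DEBUG+ records to log_file and echoes INFO+ to the
    # console (the stdlib logging-cookbook "multiple destinations" pattern)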

    # read in wkt list
    logger1.info("df_wkt at: {}".format(csv_file))
    #print ("df_wkt at:", csv_file)
    df_wkt = pd.read_csv(csv_file)
    # columns=['ImageId', 'WKT_Pix'])

    # iterate through image ids and create graphs
    t0 = time.time()
    image_ids = np.sort(np.unique(df_wkt['ImageId']))
    print("image_ids:", image_ids)
    print("len image_ids:", len(image_ids))

    for i,image_id in enumerate(image_ids):
        
        #if image_id != 'AOI_2_Vegas_img586':
        #    continue
        out_file = os.path.join(graph_dir, image_id.split('.')[0] + '.gpickle')
        
        logger1.info("\n{x} / {y}, {z}".format(x=i+1, y=len(image_ids), z=image_id))
        #print ("\n")
        #print (i, "/", len(image_ids), image_id)
                    
        # for geo referencing, im_file should be the raw image
        num_channels = 3 if local else config.num_channels  # local test images are RGB
        if num_channels == 3:
            im_file = os.path.join(path_images, 'RGB-PanSharpen_' + image_id + '.tif')
        else:
            im_file = os.path.join(path_images, 'MUL-PanSharpen_' + image_id + '.tif')   
        #im_file = os.path.join(path_images, image_id)
        if not os.path.exists(im_file):
            im_file = os.path.join(path_images, image_id + '.tif')
        
        # filter 
        df_filt = df_wkt['WKT_Pix'][df_wkt['ImageId'] == image_id]
        wkt_list = df_filt.values
        #wkt_list = [z[1] for z in df_filt_vals]
        
        # print a few values
        logger1.info("\n{x} / {y}, num linestrings: {z}".format(x=i+1, y=len(image_ids), z=len(wkt_list)))
        #print ("\n", i, "/", len(image_ids), "num linestrings:", len(wkt_list))
        if verbose:
            print("image_file:", im_file)
            print("  wkt_list[:2]", wkt_list[:2])
    
        if (len(wkt_list) == 0) or (wkt_list[0] == 'LINESTRING EMPTY'):
            G = nx.MultiDiGraph()
            nx.write_gpickle(G, out_file, protocol=pickle_protocol)
            continue
        
        # create graph
        t1 = time.time()
        G = wkt_to_G(wkt_list, im_file=im_file, 
                     min_subgraph_length_pix=min_subgraph_length_pix,
                     min_spur_length_m=min_spur_length_m,
                     verbose=super_verbose)
        t2 = time.time()
        if verbose:
            logger1.info("Time to create graph: {} seconds".format(t2-t1))
            #print ("Time to create graph:", t2-t1, "seconds")
            
        if len(G.nodes()) == 0:
            nx.write_gpickle(G, out_file, protocol=pickle_protocol)
            continue
        
        # print a node
        node = list(G.nodes())[-1]
        print(node, "last node props:", G.nodes[node])
        # print an edge
        edge_tmp = list(G.edges())[-1]
        print(edge_tmp, "last edge props:", G.get_edge_data(edge_tmp[0], edge_tmp[1]))

        # save graph
        logger1.info("Saving graph to directory: {}".format(graph_dir))
        #print ("Saving graph to directory:", graph_dir)
        nx.write_gpickle(G, out_file, protocol=pickle_protocol)
        
        # save shapefile as well?
        if save_shapefiles:
            logger1.info("Saving shapefile to directory: {}".format(graph_dir))
            try:
                ox.save_graph_shapefile(G, filename=image_id.split('.')[0], folder=graph_dir, encoding='utf-8')
            except Exception as e:
                print("Cannot save shapefile:", e)
            #out_file2 = os.path.join(graph_dir, image_id.split('.')[0] + '.graphml')
            #ox.save_graphml(G, image_id.split('.')[0] + '.graphml', folder=graph_dir)

        # plot, if desired
        if make_plots:
            print("Plotting graph...")
            outfile_plot = os.path.join(graph_dir, image_id)
            print("outfile_plot:", outfile_plot)
            ox.plot_graph(G, fig_height=9, fig_width=9)
            plt.savefig(outfile_plot, dpi=400)
            
        #if i > 30:
        #    break
        
    tf = time.time()
    logger1.info("Time to run wkt_to_G.py: {} seconds".format(tf - t0))
Example #2
     
    fn_mapping = {
        'masks': lambda name: os.path.splitext(name)[0] + '.tif'  # '.png'
    }
    image_suffix = ''  # 'img'
    # set folds
    skip_folds = []
    if args.fold is not None:
        skip_folds = [i for i in range(4) if i != int(args.fold)]
    print("paths:", paths)
    print("fn_mapping:", fn_mapping)
    print("image_suffix:", image_suffix)
    ###################

    # set up logging
    console, logger = make_logger.make_logger(log_file, logger_name='log',
                                              write_to_console=bool(config.log_to_console))

    logger.info("Testing: weight_dir: {x}".format(x=weight_dir))
    # execute
    t0 = time.time()
    logger.info("Saving eval outputs to: {x}".format(x=save_dir))
    folds = eval_cresi(config, paths, fn_mapping, image_suffix, save_dir,
                       test=True, weight_dir=weight_dir,
                       num_channels=config.num_channels,
                       nfolds=config.num_folds,
                       save_im_gdal_format=save_im_gdal_format)
    t1 = time.time()
    logger.info("Time to run {x} folds for {y} = {z} seconds".format(x=len(folds),
                y=len(os.listdir(path_images)), z=t1-t0))
    print("Time to run", len(folds), "folds for",
          len(os.listdir(path_images)), "=", t1 - t0, "seconds")
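
For reference, the fn_mapping lambda above simply swaps an image's extension for '.tif' when locating its mask, and the fold filter keeps only the requested fold; a quick self-contained check:

import os

fn_mapping = {'masks': lambda name: os.path.splitext(name)[0] + '.tif'}
print(fn_mapping['masks']('AOI_7_img123.png'))   # -> AOI_7_img123.tif

fold = 2  # e.g. args.fold = 2
skip_folds = [i for i in range(4) if i != fold]  # -> [0, 1, 3]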
Example #3
def main():

    skimage_compress = 6  # 0-9, https://scikit-image.org/docs/stable/api/skimage.external.tifffile.html#skimage.external.tifffile.TiffWriter
    
    # if using config instead of argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('config_path')
    args = parser.parse_args()
    with open(args.config_path, 'r') as f:
        cfg = json.load(f)
        config = Config(**cfg)

    # nothing to stitch if the test images were not sliced
    if config.slice_x <= 0 or config.slice_y <= 0 or config.stride_x <= 0 \
            or config.stride_y <= 0:
        print("no need to stitch")
        return
    
    print("Running stitch.py...")
    save_overlay_and_raw = False  # switch to save the stitching overlay and
                                  # non-normalized image
    # compression 0 to 9 (most compressed)
    compression_params = [cv2.IMWRITE_PNG_COMPRESSION, 5]
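    # e.g. cv2.imwrite('mask.png', arr, compression_params) writes an 8-bit PNG
    # at level 5 (0 = fastest, 9 = smallest); the flag is PNG-specific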

    folds_dir = os.path.join(config.path_results_root, 
                             config.test_results_dir, 
                             config.folds_save_dir)
    merge_dir = os.path.join(config.path_results_root, 
                             config.test_results_dir, 
                             config.merged_dir)
    
    if config.num_folds > 1:
        im_dir = merge_dir
        im_prefix = ''
    else:
        im_dir = folds_dir
        im_prefix = 'fold0_'
    
    
    # output dirs
    out_dir_mask_raw = os.path.join(config.path_results_root, config.test_results_dir, config.stitched_dir_raw)
    out_dir_count = os.path.join(config.path_results_root, config.test_results_dir, config.stitched_dir_count)
    out_dir_mask_norm = os.path.join(config.path_results_root, config.test_results_dir, config.stitched_dir_norm)
    
    # assume the tile csv is in the results dir
    path_tile_df_csv = os.path.join(config.path_results_root, config.test_results_dir, config.tile_df_csv)
    
    # make dirs
    os.makedirs(out_dir_mask_norm, exist_ok=True)  
    os.makedirs(out_dir_mask_raw, exist_ok=True)  
    os.makedirs(out_dir_count, exist_ok=True)  
 
    res_root_dir = os.path.join(config.path_results_root, config.test_results_dir)
    log_file = os.path.join(res_root_dir, 'stitch.log')
    console, logger1 = make_logger.make_logger(log_file, logger_name='log',
                                               write_to_console=bool(config.log_to_console))
    
    # read in df_pos
    #df_file = os.path.join(out_dir_root, 'tile_df.csv')
    df_pos_tot = pd.read_csv(path_tile_df_csv)
    logger1.info("len df_pos_tot: {x}".format(x=len(df_pos_tot)))
    #print("len df_pos_tot:", len(df_pos_tot))
    t0 = time.time()
    ttot = 0
    
    # save for each individual image
    idxs = np.sort(np.unique(df_pos_tot['idx']))
    logger1.info("image idxs: {x}".format(x=idxs))
    #print("image idxs:", idxs)
    for idx in idxs:
        logger1.info("idx: {x} / {y}".format(x=idx+1, y=len(idxs)))
        # print("\nidx:", idx, "/", len(idxs))
        # filter by idx
        df_pos = df_pos_tot.loc[df_pos_tot['idx'] == idx]
        logger1.info("len df_pos: {x}".format(x=len(df_pos)))
        # print("len df_pos:", len(df_pos))
        # execute
        t1 = time.time()
        name, mask_norm, mask_raw, overlay_count = \
                post_process_image(df_pos, im_dir, 
                                   im_prefix=im_prefix,
                                   num_classes=config.num_classes,
                                   super_verbose=False)
        t2 = time.time()
        ttot += t2-t1
        logger1.info("Time to run stitch for idx: {x} = {y} seconds".format(x=idx, y=t2-t1))
        # print("Time to run stitch for idx:", idx, "=", t2 - t1, "seconds")
        logger1.info("mask_norm.shape: {x}".format(x=mask_norm.shape))
        print("mask_norm.dtype:", mask_norm.dtype)
        print("mask_raw.dtype:", mask_raw.dtype)
        print("overlay_count.dtype:", overlay_count.dtype)
        print("np.max(overlay_count):", np.max(overlay_count))
        print("np.min(overlay_count):", np.min(overlay_count))
    
        # write to files (cv2 can't read enormous files, though it can write large ones)
        print("Saving to files...")
        # remove prefix, if required
        if len(im_prefix) > 0:
            out_file_root = name.split(im_prefix)[-1] + '.tif'
        else:
            out_file_root = name + '.tif'
            
        logger1.info("out_file_root {x}".format(x=out_file_root))
        #print("out_file_root:", out_file_root)
        out_file_mask_norm = os.path.join(out_dir_mask_norm, out_file_root)
        out_file_mask_raw = os.path.join(out_dir_mask_raw, out_file_root)
        out_file_count = os.path.join(out_dir_count, out_file_root)
        
        if config.num_classes == 1:
            cv2.imwrite(out_file_mask_norm, mask_norm.astype(np.uint8), compression_params)
            del mask_norm
            if save_overlay_and_raw:
                cv2.imwrite(out_file_mask_raw, mask_raw.astype(np.uint8), compression_params)
            del mask_raw
        else:
            mask_norm = np.moveaxis(mask_norm, -1, 0).astype(np.uint8)
            print("mask_norm.shape:", mask_norm.shape)
            print("mask_norm.dtype:", mask_norm.dtype)
            skimage.io.imsave(out_file_mask_norm, mask_norm,
                              check_contrast=False,
                              compress=skimage_compress)
            del mask_norm
            if save_overlay_and_raw:    
                mask_raw = np.moveaxis(mask_raw, -1, 0).astype(np.uint8)
                skimage.io.imsave(out_file_mask_raw, mask_raw,
                                  check_contrast=False,
                                  compress=skimage_compress)
            del mask_raw

        if save_overlay_and_raw:
            cv2.imwrite(out_file_count, overlay_count, compression_params)
        #cv2.imwrite(out_file_count, overlay_count.astype(np.uint8), compression_params)
        del overlay_count
        #skimage.io.imsave(out_file_mask_norm, mask_norm)
        #skimage.io.imsave(out_file_mask_raw, mask_raw)
        #skimage.io.imsave(out_file_count, overlay_count)
    
    t3 = time.time()
    logger1.info("Time to run stitch.py and create large masks: {} seconds".format(ttot))
    logger1.info("Time to run stitch.py and create large masks (and save): {} seconds".format(t3-t0))
    # print("Time to run stitch.py and create large masks:", ttot, "seconds")
    print("Time to run stitch.py and create large masks (and save):", t3 - t0, "seconds")

    return
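
post_process_image is not shown here, but the names mask_raw, overlay_count, and mask_norm suggest the usual accumulate-and-normalize scheme for overlapping tiles. A toy numpy sketch of that idea (an assumption about the implementation, not the repo's actual code):

import numpy as np

# two 4x4 tiles overlapping by 2 columns on a 4x6 canvas
mask_raw = np.zeros((4, 6), dtype=np.float32)
overlay_count = np.zeros((4, 6), dtype=np.float32)
tile_a, tile_b = np.full((4, 4), 100.0), np.full((4, 4), 200.0)

mask_raw[:, 0:4] += tile_a
overlay_count[:, 0:4] += 1
mask_raw[:, 2:6] += tile_b
overlay_count[:, 2:6] += 1

mask_norm = mask_raw / np.maximum(overlay_count, 1)  # average where tiles overlap
print(mask_norm[0])  # [100. 100. 150. 150. 200. 200.]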
Example #4
def main():
    '''See _arr_slicing_speed.ipynb for better tests'''
    global logger1

    parser = argparse.ArgumentParser()
    parser.add_argument('config_path')
    args = parser.parse_args()

    with open(args.config_path, 'r') as f:
        cfg = json.load(f)
        config = Config(**cfg)

    ##########
    # Variables
    t0 = time.time()
    percentile = 85
    dx, dy = 4, 4  # nearest neighbors patch size
    min_z = 128  # min z value to consider a hit
    N_plots = 20
    figsize = (12, 12)

    # best colors
    node_color, edge_color = '#cc9900', '#ffbf00'  # gold

    default_node_size = 2
    plot_width_key, plot_width_mult = 'inferred_speed_mph', 0.085  # 0.08  # variable width
    #width_key, width_mult = 4, 1   # constant width
    if config.num_classes == 8:
        use_totband = True
    else:
        use_totband = False

    save_shapefiles = True
    use_weighted_mean = True
    variable_edge_speed = False
    run_08a_plot_graph_plus_im = False
    verbose = False
    ##########

    # input dirs
    res_root_dir = os.path.join(config.path_results_root,
                                config.test_results_dir)
    #path_images = os.path.join(config.path_data_root, config.test_data_refined_dir)
    graph_dir = os.path.join(res_root_dir, config.graph_dir)
    # get mask location, check if we are stitching together large images or not
    out_dir_mask_norm = os.path.join(config.path_results_root,
                                     config.test_results_dir,
                                     config.stitched_dir_norm)
    folds_dir = os.path.join(config.path_results_root, config.test_results_dir,
                             config.folds_save_dir)
    merge_dir = os.path.join(config.path_results_root, config.test_results_dir,
                             config.merged_dir)
    mask_prefix = ''
    if os.path.exists(out_dir_mask_norm):
        mask_dir = out_dir_mask_norm
    else:
        if config.num_folds > 1:
            mask_dir = merge_dir
        else:
            mask_dir = folds_dir
            mask_prefix = 'fold0_'

    log_file = os.path.join(res_root_dir, 'skeleton_speed.log')
    console, logger1 = make_logger.make_logger(log_file, logger_name='log')

    # output dirs
    graph_speed_dir = os.path.join(res_root_dir, config.graph_dir + '_speed')
    os.makedirs(graph_speed_dir, exist_ok=True)
    logger1.info("graph_speed_dir: " + graph_speed_dir)

    # speed conversion dataframes (see _speed_data_prep.ipynb)
    speed_conversion_file = config.speed_conversion_file
    # load the conversion file
    # get the conversion dictionary between pixel mask values and road speed (mph)
    if config.num_classes > 1:
        conv_df, conv_dict \
            = load_speed_conversion_dict_binned(speed_conversion_file)
    else:
        conv_df, conv_dict \
            = load_speed_conversion_dict_contin(speed_conversion_file)
    logger1.info("speed conv_dict: " + str(conv_dict))

    # Add travel time to entire dir
    add_travel_time_dir(graph_dir,
                        mask_dir,
                        conv_dict,
                        graph_speed_dir,
                        min_z=min_z,
                        dx=dx,
                        dy=dy,
                        percentile=percentile,
                        use_totband=use_totband,
                        use_weighted_mean=use_weighted_mean,
                        variable_edge_speed=variable_edge_speed,
                        mask_prefix=mask_prefix,
                        save_shapefiles=save_shapefiles,
                        verbose=verbose)

    t1 = time.time()
    logger1.info(
        "Time to execute add_travel_time_dir(): {x} seconds".format(x=t1 - t0))

    # plot a few
    if N_plots > 0:

        logger1.info("\nPlot a few...")
        # define output dir
        graph_speed_plots_dir = os.path.join(res_root_dir,
                                             config.graph_dir + '_speed_plots')
        os.makedirs(graph_speed_plots_dir, exist_ok=True)

        # plot graph on image (with width proportional to speed)
        path_images = os.path.join(config.path_data_root,
                                   config.test_data_refined_dir)
        image_list = [z for z in os.listdir(path_images) if z.endswith('tif')]
        if len(image_list) > N_plots:
            image_names = np.random.choice(image_list, N_plots)
        else:
            image_names = sorted(image_list)
        #logger1.info("image_names: " + image_names)

        for i, image_name in enumerate(image_names):
            if i > 1000:
                break

            image_path = os.path.join(path_images, image_name)
            logger1.info("\n\nPlotting: " + image_name + "  " + image_path)
            pkl_path = os.path.join(graph_speed_dir,
                                    image_name.split('.')[0] + '.gpickle')
            logger1.info("   pkl_path: " + pkl_path)
            if not os.path.exists(pkl_path):
                logger1.info("    missing pkl: " + pkl_path)
                continue
            G = nx.read_gpickle(pkl_path)
            #if not os.path.exists(image_path)

            figname = os.path.join(graph_speed_plots_dir, image_name)
            _ = plot_graph_on_im_yuge(G,
                                      image_path,
                                      figsize=figsize,
                                      show_endnodes=True,
                                      default_node_size=default_node_size,
                                      width_key=plot_width_key,
                                      width_mult=plot_width_mult,
                                      node_color=node_color,
                                      edge_color=edge_color,
                                      title=image_name,
                                      figname=figname,
                                      verbose=True,
                                      super_verbose=verbose)

    t2 = time.time()
    logger1.info(
        "Time to execute add_travel_time_dir(): {x} seconds".format(x=t1 - t0))
    logger1.info("Time to make plots: {x} seconds".format(x=t2 - t1))
    logger1.info("Total time: {x} seconds".format(x=t2 - t0))
Example #5
def main():
    global logger1
    spacenet_naming_convention = False  # True

    parser = argparse.ArgumentParser()
    parser.add_argument('config_path')
    args = parser.parse_args()

    with open(args.config_path, 'r') as f:
        cfg = json.load(f)
        config = Config(**cfg)

    im_prefix = ''
    # check if we are stitching together large images or not
    out_dir_mask_norm = None
    folds_dir = os.path.join(config.path_results_root,
                             config.test_results_dir,
                             config.folds_save_dir)
    im_dir = folds_dir
    os.makedirs(im_dir, exist_ok=True)

    # output files
    res_root_dir = os.path.join(config.path_results_root,
                                config.test_results_dir)
    outfile_csv = os.path.join(res_root_dir, config.wkt_submission)
    # outfile_gpickle = os.path.join(res_root_dir, 'G_sknw.gpickle')
    out_ske_dir = os.path.join(res_root_dir, config.skeleton_dir)  # set to '' to not save
    os.makedirs(out_ske_dir, exist_ok=True)
    if len(config.skeleton_pkl_dir) > 0:
        out_gdir = os.path.join(res_root_dir, config.skeleton_pkl_dir)  # set to '' to not save
        os.makedirs(out_gdir, exist_ok=True)
    else:
        out_gdir = ''


    print("im_dir:", im_dir)
    print("out_ske_dir:", out_ske_dir)
    print("out_gdir:", out_gdir)

    thresh = config.skeleton_thresh
    #    # thresholds for each aoi
    #    threshes={'2': .3, '3': .3, '4': .3, '5': .2}
    #    thresh = threshes[config.aoi]
    min_subgraph_length_pix = config.min_subgraph_length_pix
    # min_subgraph_length_pix=200

    debug = False
    add_small = True
    fix_borders = True
    img_shape = ()  # (1300, 1300)
    skel_replicate = 5
    skel_clip = 2
    img_mult = 255
    hole_size = 300
    cv2_kernel_close = 7
    cv2_kernel_open = 7
    # max_out_size=(16000, 8000)  # works fine
    # max_out_size=(8003, 16009)   # works fine
    max_out_size = (2000000, 2000000)

    log_file = os.path.join(res_root_dir, 'skeleton.log')
    console, logger1 = make_logger.make_logger(log_file, logger_name='log')

    print("Building wkts...")
    t0 = time.time()
    df = build_wkt_dir(im_dir, outfile_csv, out_ske_dir, out_gdir, thresh,  # threshes={'2': .3, '3': .3, '4': .3, '5': .2},
                       debug=debug, add_small=add_small, fix_borders=fix_borders,
                       img_shape=img_shape,
                       skel_replicate=skel_replicate, skel_clip=skel_clip,
                       img_mult=img_mult, hole_size=hole_size,
                       min_subgraph_length_pix=min_subgraph_length_pix,
                       cv2_kernel_close=cv2_kernel_close, cv2_kernel_open=cv2_kernel_open,
                       max_out_size=max_out_size,
                       skeleton_band=config.skeleton_band,
                       num_classes=config.num_classes,
                       im_prefix=im_prefix,
                       spacenet_naming_convention=spacenet_naming_convention)

    print("len df:", len(df))
    print("outfile:", outfile_csv)
    t1 = time.time()
    logger1.info("Total time to run build_wkt_dir: {} seconds".format(t1 - t0))
Example #6
def main():
    '''See _arr_slicing_speed.ipynb for better tests'''
    global logger1

    parser = argparse.ArgumentParser()
    parser.add_argument('config_path')
    args = parser.parse_args()

    with open(args.config_path, 'r') as f:
        cfg = json.load(f)
        config = Config(**cfg)

    ##########
    # Variables
    t0 = time.time()
    percentile = 85  # percentile filter (default = 85)
    dx, dy = 6, 6  # nearest neighbors patch size (default = (4, 4))
    min_z = 128  # min z value to consider a hit (default = 128)
    N_plots = 0
    n_threads = 12

    # set speed bands, assume a total channel is appended to the speed channels
    if config.skeleton_band > 0:
        max_speed_band = config.skeleton_band - 1
    else:
        max_speed_band = config.num_channels - 1
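    # e.g. with an 8-band mask whose last band (skeleton_band = 7) is the
    # appended "total" channel, bands 0..6 hold the binned speeds, so
    # max_speed_band = 7 - 1 = 6  (illustrative values)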

    #if config.num_classes == 8:
    #    use_totband = True
    #else:
    #    use_totband = False
    save_shapefiles = True
    use_weighted_mean = True
    variable_edge_speed = False
    run_08a_plot_graph_plus_im = False
    verbose = False
    ##########

    # input dirs
    res_root_dir = os.path.join(config.path_results_root,
                                config.test_results_dir)
    #path_images = os.path.join(config.path_data_root, config.test_data_refined_dir)
    graph_dir = os.path.join(res_root_dir, config.graph_dir)
    # get mask location, check if we are stitching together large images or not
    out_dir_mask_norm = os.path.join(config.path_results_root,
                                     config.test_results_dir,
                                     config.stitched_dir_norm)
    folds_dir = os.path.join(config.path_results_root, config.test_results_dir,
                             config.folds_save_dir)
    merge_dir = os.path.join(config.path_results_root, config.test_results_dir,
                             config.merged_dir)
    mask_prefix = ''
    if os.path.exists(out_dir_mask_norm):
        mask_dir = out_dir_mask_norm
    else:
        if config.num_folds > 1:
            mask_dir = merge_dir
        else:
            mask_dir = folds_dir
            mask_prefix = 'fold0_'

    #if os.path.exists(out_dir_mask_norm):
    #    mask_dir = out_dir_mask_norm
    #else:
    #    mask_dir = merge_dir
    log_file = os.path.join(res_root_dir, 'graph_speed.log')
    console, logger1 = make_logger.make_logger(log_file,
                                               logger_name='log',
                                               write_to_console=bool(
                                                   config.log_to_console))
    #console, logger1 = make_logger.make_logger(log_file, logger_name='log')

    # output dirs
    graph_speed_dir = os.path.join(res_root_dir, config.graph_dir + '_speed')
    os.makedirs(graph_speed_dir, exist_ok=True)
    logger1.info("graph_speed_dir: " + graph_speed_dir)

    # speed conversion dataframes (see _speed_data_prep.ipynb)
    speed_conversion_file = config.speed_conversion_file
    # get the conversion dictionary between pixel mask values and road speed (mph)
    if config.num_classes > 1:
        conv_df, conv_dict \
            = load_speed_conversion_dict_binned(speed_conversion_file)
    else:
        conv_df, conv_dict \
            = load_speed_conversion_dict_contin(speed_conversion_file)
    logger1.info("speed conv_dict: " + str(conv_dict))

    # Add travel time to entire dir
    add_travel_time_dir(graph_dir,
                        mask_dir,
                        conv_dict,
                        graph_speed_dir,
                        min_z=min_z,
                        dx=dx,
                        dy=dy,
                        percentile=percentile,
                        max_speed_band=max_speed_band,
                        use_weighted_mean=use_weighted_mean,
                        variable_edge_speed=variable_edge_speed,
                        mask_prefix=mask_prefix,
                        save_shapefiles=save_shapefiles,
                        n_threads=n_threads,
                        verbose=verbose)

    t1 = time.time()
    logger1.info(
        "Time to execute add_travel_time_dir(): {x} seconds".format(x=t1 - t0))

    # plot a few
    if N_plots > 0:

        logger1.info("\nPlot a few...")

        # plotting
        figsize = (12, 12)
        # best colors
        node_color, edge_color = '#cc9900', '#ffbf00'  # gold

        default_node_size = 2  #0.15 #4
        plot_width_key, plot_width_mult = 'inferred_speed_mph', 0.085  # 0.08  # variable width
        #width_key, width_mult = 4, 1   # constant width

        # define output dir
        graph_speed_plots_dir = os.path.join(res_root_dir,
                                             config.graph_dir + '_speed_plots')
        os.makedirs(graph_speed_plots_dir, exist_ok=True)

        # plot graph on image (with width proportional to speed)
        path_images = config.test_data_refined_dir
        # path_images = os.path.join(config.path_data_root, config.test_data_refined_dir)

        image_list = [z for z in os.listdir(path_images) if z.endswith('tif')]
        if len(image_list) > N_plots:
            image_names = np.random.choice(image_list, N_plots)
        else:
            image_names = sorted(image_list)
        #logger1.info("image_names: " + image_names)

        for i, image_name in enumerate(image_names):
            if i > 10:
                break

            image_path = os.path.join(path_images, image_name)
            logger1.info("\n\nPlotting: " + image_name + "  " + image_path)
            pkl_path = os.path.join(graph_speed_dir,
                                    image_name.split('.')[0] + '.gpickle')
            logger1.info("   pkl_path: " + pkl_path)
            if not os.path.exists(pkl_path):
                logger1.info("    missing pkl: " + pkl_path)
                continue
            G = nx.read_gpickle(pkl_path)
            #if not os.path.exists(image_path)

            figname = os.path.join(graph_speed_plots_dir, image_name)
            figname = figname.replace('.tif', '.png')
            _ = apls_plots.plot_graph_on_im_yuge(
                G,
                image_path,
                figsize=figsize,
                show_endnodes=True,
                default_node_size=default_node_size,
                width_key=plot_width_key,
                width_mult=plot_width_mult,
                node_color=node_color,
                edge_color=edge_color,
                title=image_name,
                figname=figname,
                verbose=True,
                super_verbose=verbose)

    t2 = time.time()
    logger1.info(
        "Time to execute add_travel_time_dir(): {x} seconds".format(x=t1 - t0))
    logger1.info("Time to make plots: {x} seconds".format(x=t2 - t1))
    logger1.info("Total time: {x} seconds".format(x=t2 - t0))
    print("Total time: {x} seconds".format(x=t2 - t0))
Example #7
def main():

    global logger1
    spacenet_naming_convention = False # True
    
    parser = argparse.ArgumentParser()
    parser.add_argument('config_path')
    args = parser.parse_args()

    with open(args.config_path, 'r') as f:
        cfg = json.load(f)
        config = Config(**cfg)

    im_prefix = ''
    # check if we are stitching together large images or not
    out_dir_mask_norm = os.path.join(config.path_results_root, 
                                     config.test_results_dir, 
                                     config.stitched_dir_norm)
    folds_dir = os.path.join(config.path_results_root, 
                             config.test_results_dir, 
                             config.folds_save_dir)
    merge_dir = os.path.join(config.path_results_root, 
                             config.test_results_dir, 
                             config.merged_dir)

    if os.path.exists(out_dir_mask_norm):
        im_dir = out_dir_mask_norm
    else:
        if config.num_folds > 1:
            im_dir = merge_dir
        else:
            im_dir = folds_dir
            im_prefix = 'fold0_'
            
    os.makedirs(im_dir, exist_ok=True)
  
    # output files
    res_root_dir = os.path.join(config.path_results_root, 
                                config.test_results_dir)
    outfile_csv = os.path.join(res_root_dir, config.wkt_submission)
    #outfile_gpickle = os.path.join(res_root_dir, 'G_sknw.gpickle')
    out_ske_dir = os.path.join(res_root_dir, config.skeleton_dir)  # set to '' to not save
    os.makedirs(out_ske_dir, exist_ok=True)
    if len(config.skeleton_pkl_dir) > 0:
        out_gdir = os.path.join(res_root_dir, config.skeleton_pkl_dir)  # set to '' to not save
        os.makedirs(out_gdir, exist_ok=True)
    else:
        out_gdir = ''
         
    print ("im_dir:", im_dir)
    print ("out_ske_dir:", out_ske_dir)
    print ("out_gdir:", out_gdir)
        
    thresh = config.skeleton_thresh
    min_subgraph_length_pix = config.min_subgraph_length_pix
    
    debug = False
    add_small = True
    fix_borders = True
    img_shape = ()  # (1300, 1300)
    skel_replicate = 5
    skel_clip = 2
    img_mult = 255
    hole_size = 300
    cv2_kernel_close = 7
    cv2_kernel_open = 7
    # max_out_size=(16000, 8000)  # works fine
    # max_out_size=(8003, 16009)  # works fine
    max_out_size = (2000000, 2000000)
 
    log_file = os.path.join(res_root_dir, 'skeleton.log')
    console, logger1 = make_logger.make_logger(log_file, logger_name='log')
   
    
    print ("Building wkts...")
    t0 = time.time()
    df = build_wkt_dir(im_dir, outfile_csv, out_ske_dir, out_gdir, thresh,  # threshes={'2': .3, '3': .3, '4': .3, '5': .2},
                       debug=debug, add_small=add_small, fix_borders=fix_borders,
                       img_shape=img_shape,
                       skel_replicate=skel_replicate, skel_clip=skel_clip,
                       img_mult=img_mult, hole_size=hole_size,
                       min_subgraph_length_pix=min_subgraph_length_pix,
                       cv2_kernel_close=cv2_kernel_close, cv2_kernel_open=cv2_kernel_open,
                       max_out_size=max_out_size,
                       skeleton_band=config.skeleton_band,
                       num_classes=config.num_classes,
                       im_prefix=im_prefix,
                       spacenet_naming_convention=spacenet_naming_convention)

    print ("len df:", len(df))
    print ("outfile:", outfile_csv)
    t1 = time.time()
    logger1.info("Total time to run build_wkt_dir: {} seconds".format(t1-t0))
Example #8
def main():
    
    global logger1 
    
    # min_subgraph_length_pix = 300
    simplify_graph = True  # False
    verbose = True  # False
    super_verbose = False
    make_plots = False  # True
    save_shapefiles = False
    pickle_protocol = 4  # 4 is most recent, python 2.7 can't read 4
    node_iter = 10000  # start int for node naming
    edge_iter = 10000  # start int for edge naming
    manually_reproject_nodes = False  # True
    n_threads = 12
        
    parser = argparse.ArgumentParser()
    parser.add_argument('config_path')
    args = parser.parse_args()
    with open(args.config_path, 'r') as f:
        cfg = json.load(f)
        config = Config(**cfg)
        
    # output files
    res_root_dir = os.path.join(config.path_results_root, config.test_results_dir)
    path_images = os.path.join(config.test_data_refined_dir)
    # path_images = os.path.join(config.path_data_root, config.test_data_refined_dir)
    csv_file = os.path.join(res_root_dir, config.wkt_submission)
    graph_dir = os.path.join(res_root_dir, config.graph_dir)
    log_file = os.path.join(res_root_dir, 'wkt_to_G.log')
    os.makedirs(graph_dir, exist_ok=True)

    min_subgraph_length_pix = config.min_subgraph_length_pix
    min_spur_length_m = config.min_spur_length_m

    console, logger1 = make_logger.make_logger(log_file, logger_name='log',
                                               write_to_console=bool(config.log_to_console))   

    # read in wkt list
    logger1.info("df_wkt at: {}".format(csv_file))
    #print ("df_wkt at:", csv_file)
    df_wkt = pd.read_csv(csv_file)

    # iterate through image ids and create graphs
    t0 = time.time()
    image_ids = np.sort(np.unique(df_wkt['ImageId']))
    nfiles = len(image_ids)
    print("image_ids:", image_ids)
    print("len image_ids:", len(image_ids))
    n_threads = min(n_threads, nfiles)

    params = []
    for i,image_id in enumerate(image_ids):
        
        # if image_id != 'SN5_roads_test_public_AOI_9_San_Juan_PS-RGB_chip98':
        #     continue
        # print("\n")
        out_file = os.path.join(graph_dir, image_id.split('.')[0] + '.gpickle')
        
        if verbose:
            logger1.info("\n{x} / {y}, {z}".format(x=i+1, y=len(image_ids), z=image_id))
        #print ("\n")
        #print (i, "/", len(image_ids), image_id)
                    
        # for geo referencing, im_file should be the raw image
        if config.num_channels == 3:
            im_file = os.path.join(path_images, 'RGB-PanSharpen_' + image_id + '.tif')
        else:
            im_file = os.path.join(path_images, 'MUL-PanSharpen_' + image_id + '.tif')   
        #im_file = os.path.join(path_images, image_id)
        if not os.path.exists(im_file):
            im_file = os.path.join(path_images, image_id + '.tif')
        
        # filter 
        df_filt = df_wkt['WKT_Pix'][df_wkt['ImageId'] == image_id]
        wkt_list = df_filt.values
        #wkt_list = [z[1] for z in df_filt_vals]
        
        # print a few values
        if verbose:
            logger1.info("\n{x} / {y}, num linestrings: {z}".format(x=i+1, y=len(image_ids), z=len(wkt_list)))
        #print ("\n", i, "/", len(image_ids), "num linestrings:", len(wkt_list))
        if verbose:
            print("image_file:", im_file)
            print("  wkt_list[:2]", wkt_list[:2])
    
        if (len(wkt_list) == 0) or (wkt_list[0] == 'LINESTRING EMPTY'):
            G = nx.MultiDiGraph()
            nx.write_gpickle(G, out_file, protocol=pickle_protocol)
            continue
        else:
            params.append((wkt_list, im_file, min_subgraph_length_pix, \
                           node_iter, edge_iter, \
                           min_spur_length_m, simplify_graph, \
                           config.rdp_epsilon,
                           manually_reproject_nodes, 
                           out_file, graph_dir, n_threads, verbose))      

    # execute
    if n_threads > 1:
        with Pool(n_threads) as pool:
            pool.map(wkt_to_G, params)
    else:
        for param in params:
            wkt_to_G(param)
        
    tf = time.time()
    logger1.info("Time to run wkt_to_G.py: {} seconds".format(tf - t0))
    print("Time to run wkt_to_G.py: {} seconds".format(tf - t0))
Example #9
                            config.folds_save_dir)
    print("save_dir:", save_dir)
    os.makedirs(save_dir, exist_ok=True)

    fn_mapping = {
        'masks': lambda name: os.path.splitext(name)[0] + '.tif'  #'.png'
    }
    image_suffix = ''  #'img'
    # set folds
    print("paths:", paths)
    print("fn_mapping:", fn_mapping)
    print("image_suffix:", image_suffix)
    ###################

    # set up logging
    console, logger = make_logger.make_logger(log_file, logger_name='log')

    logger.info("Testing: weight_dir: {x}".format(x=weight_dir))
    # print ("Testing: weight_dir:", weight_dir)
    # execute
    t0 = time.time()
    logger.info("Saving eval outputs to: {x}".format(x=save_dir))
    #print ("Saving eval outputs to:", save_dir)
    folds = eval_cresi(config,
                       paths,
                       fn_mapping,
                       image_suffix,
                       save_dir,
                       test=True,
                       weight_dir=weight_dir,
                       num_channels=config.num_channels,
Example #10
def main():

    # if using config instead of argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('config_path')
    args = parser.parse_args()
    with open(args.config_path, 'r') as f:
        cfg = json.load(f)
        config = Config(**cfg)

    print("Running stitch.py...")

    save_overlay_and_raw = False  # switch to save the stitching overlay and
                                  # non-normalized image

    # compression 0 to 9 (most compressed)
    compression_params = [cv2.IMWRITE_PNG_COMPRESSION, 5]

    folds_dir = os.path.join(config.path_results_root, config.test_results_dir,
                             config.folds_save_dir)
    merge_dir = os.path.join(config.path_results_root, config.test_results_dir,
                             config.merged_dir)

    if config.num_folds > 1:
        im_dir = merge_dir
        im_prefix = ''
    else:
        im_dir = folds_dir
        im_prefix = 'fold0_'

    # output dirs
    out_dir_mask_raw = os.path.join(config.path_results_root,
                                    config.test_results_dir,
                                    config.stitched_dir_raw)
    out_dir_count = os.path.join(config.path_results_root,
                                 config.test_results_dir,
                                 config.stitched_dir_count)
    out_dir_mask_norm = os.path.join(config.path_results_root,
                                     config.test_results_dir,
                                     config.stitched_dir_norm)

    # assume tile csv is in data dir, not root dir
    path_tile_df_csv = os.path.join(config.path_data_root,
                                    os.path.dirname(config.test_sliced_dir),
                                    config.tile_df_csv)
    # try tile_df_csv in results path
    #path_tile_df_csv = os.path.join(config.path_results_root, config.test_results_dir, config.tile_df_csv)


    # make dirs
    os.makedirs(out_dir_mask_norm, exist_ok=True)
    os.makedirs(out_dir_mask_raw, exist_ok=True)
    os.makedirs(out_dir_count, exist_ok=True)

    res_root_dir = os.path.join(config.path_results_root,
                                config.test_results_dir)
    log_file = os.path.join(res_root_dir, 'stitch.log')
    console, logger1 = make_logger.make_logger(log_file, logger_name='log')

    # read in df_pos
    #df_file = os.path.join(out_dir_root, 'tile_df.csv')
    df_pos_tot = pd.read_csv(path_tile_df_csv)
    logger1.info("len df_pos_tot: {x}".format(x=len(df_pos_tot)))
    #print ("len df_pos_tot:", len(df_pos_tot))
    t0 = time.time()
    ttot = 0

    # save for each individual image
    idxs = np.sort(np.unique(df_pos_tot['idx']))
    logger1.info("image idxs: {x}".format(x=idxs))
    #print ("image idxs:", idxs)
    for idx in idxs:
        logger1.info("\n")
        logger1.info("idx: {x} / {y}".format(x=idx + 1, y=len(idxs)))
        #print ("\nidx:", idx, "/", len(idxs))
        # filter by idx
        df_pos = df_pos_tot.loc[df_pos_tot['idx'] == idx]
        logger1.info("len df_pos: {x}".format(x=len(df_pos)))
        #print ("len df_pos:", len(df_pos))
        # execute
        t1 = time.time()
        name, mask_norm, mask_raw, overlay_count = \
                post_process_image(df_pos, im_dir,
                                   im_prefix=im_prefix,
                                   num_classes=config.num_classes,
                                   super_verbose=False)
        t2 = time.time()
        ttot += t2 - t1
        logger1.info("Time to run stitch for idx: {x} = {y} seconds".format(
            x=idx, y=t2 - t1))
        #print ("Time to run stitch for idx:", idx, "=", t2 - t1, "seconds")
        logger1.info("mask_norm.shape: {x}".format(x=mask_norm.shape))
        print("mask_norm.dtype:", mask_norm.dtype)
        print("mask_raw.dtype:", mask_raw.dtype)
        print("overlay_count.dtype:", overlay_count.dtype)
        print("np.max(overlay_count):", np.max(overlay_count))
        print("np.min(overlay_count):", np.min(overlay_count))

        # write to files (cv2 can't read enormous files, though it can write large ones)
        print("Saving to files...")
        # remove prefix, if required
        if len(im_prefix) > 0:
            out_file_root = name.split(im_prefix)[-1] + '.tif'
        else:
            out_file_root = name + '.tif'

        logger1.info("out_file_root {x}:".format(x=out_file_root))
        #print ("out_file_root:", out_file_root)
        out_file_mask_norm = os.path.join(out_dir_mask_norm, out_file_root)
        out_file_mask_raw = os.path.join(out_dir_mask_raw, out_file_root)
        out_file_count = os.path.join(out_dir_count, out_file_root)

        if config.num_classes == 1:
            cv2.imwrite(out_file_mask_norm, mask_norm.astype(np.uint8),
                        compression_params)
            del mask_norm
            if save_overlay_and_raw:
                cv2.imwrite(out_file_mask_raw, mask_raw.astype(np.uint8),
                            compression_params)
            del mask_raw
        else:
            mask_norm = np.moveaxis(mask_norm, -1, 0).astype(np.uint8)
            skimage.io.imsave(out_file_mask_norm, mask_norm, compress=1)
            del mask_norm
            if save_overlay_and_raw:
                mask_raw = np.moveaxis(mask_raw, -1, 0).astype(np.uint8)
                skimage.io.imsave(out_file_mask_raw, mask_raw, compress=1)
            del mask_raw

        if save_overlay_and_raw:
            cv2.imwrite(out_file_count, overlay_count, compression_params)
        #cv2.imwrite(out_file_count, overlay_count.astype(np.uint8), compression_params)
        del overlay_count
        #skimage.io.imsave(out_file_mask_norm, mask_norm)
        #skimage.io.imsave(out_file_mask_raw, mask_raw)
        #skimage.io.imsave(out_file_count, overlay_count)

    t3 = time.time()
    logger1.info(
        "Time to run stitch.py and create large masks: {} seconds".format(
            ttot))
    logger1.info(
        "Time to run stitch.py and create large masks (and save): {} seconds".
        format(t3 - t0))
    #print ("Time to run stitch.py and create large masks:", ttot, "seconds")
    #print ("Time to run stitch.py and create large masks (and save):", t3 - t0, "seconds")

    return
Example #11
def main():
    global logger1

    # min_subgraph_length_pix = 300
    min_spur_length_m = 0.001  # default = 5
    local = False  # True
    verbose = True
    super_verbose = False
    make_plots = False  # True
    save_shapefiles = True  # False
    pickle_protocol = 4  # 4 is most recent, python 2.7 can't read 4

    # local
    if local:
        pass  # local paths omitted in this variant

    else:
        parser = argparse.ArgumentParser()
        parser.add_argument('config_path')
        args = parser.parse_args()
        with open(args.config_path, 'r') as f:
            cfg = json.load(f)
            config = Config(**cfg)

        # output files
        res_root_dir = os.path.join(config.path_results_root, config.test_results_dir)
        path_images = os.path.join(config.path_data_root, config.test_data_refined_dir)
        csv_file = os.path.join(res_root_dir, config.wkt_submission)
        graph_dir = os.path.join(res_root_dir, config.graph_dir)
        log_file = os.path.join(res_root_dir, 'wkt_to_G.log')
        os.makedirs(graph_dir, exist_ok=True)

        min_subgraph_length_pix = config.min_subgraph_length_pix
        min_spur_length_m = config.min_spur_length_m

    console, logger1 = make_logger.make_logger(log_file, logger_name='log')

    # read in wkt list
    logger1.info("df_wkt at: {}".format(csv_file))
    # print ("df_wkt at:", csv_file)
    df_wkt = pd.read_csv(csv_file)
    # columns=['ImageId', 'WKT_Pix'])

    # iterate through image ids and create graphs
    t0 = time.time()
    image_ids = np.sort(np.unique(df_wkt['ImageId']))
    print("image_ids:", image_ids)
    print("len image_ids:", len(image_ids))

    imfiles_args = [[image_id, config, graph_dir, path_images, df_wkt, pickle_protocol, min_spur_length_m,
                     min_subgraph_length_pix, super_verbose, verbose, save_shapefiles, make_plots] for image_id in
                    image_ids]

    with multiprocessing.Pool(16) as pool:
        pool.starmap(process_img, imfiles_args)
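    # starmap unpacks each inner list into positional arguments of process_img;
    # plain Pool.map would pass the whole list as one argument instead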


    tf = time.time()
    logger1.info("Time to run wkt_to_G.py: {} seconds".format(tf - t0))
Example #12
def main():

    global logger1
    add_small = True
    verbose = True
    super_verbose = False
    spacenet_naming_convention = False  # True
    debug = False
    fix_borders = True
    img_shape = ()  # (1300, 1300)
    skel_replicate = 5
    skel_clip = 2
    img_mult = 255
    hole_size = 300
    cv2_kernel_close = 7
    cv2_kernel_open = 7
    kernel_blur = -1  # 25
    min_background_frac = -1  # 0.2
    max_out_size = (2000000, 2000000)
    n_threads = 12
    im_prefix = ''

    parser = argparse.ArgumentParser()
    parser.add_argument('config_path')
    args = parser.parse_args()
    with open(args.config_path, 'r') as f:
        cfg = json.load(f)
        config = Config(**cfg)

    # GSD (ground sample distance) is in meters per pixel, so dividing a
    # length in meters by the GSD converts it to pixels
    min_spur_length_pix = int(np.rint(config.min_spur_length_m / config.GSD))
    print("min_spur_length_pix:", min_spur_length_pix)
    use_medial_axis = bool(config.use_medial_axis)
    print("Use_medial_axis?", use_medial_axis)
    pix_extent = config.eval_rows - (2 * config.padding)  # image extent minus padding

    # choose the mask directory: prefer stitched normalized masks if they exist,
    # else the merged folds (num_folds > 1), else the single fold-0 output
    out_dir_mask_norm = os.path.join(config.path_results_root,
                                     config.test_results_dir,
                                     config.stitched_dir_norm)
    folds_dir = os.path.join(config.path_results_root, config.test_results_dir,
                             config.folds_save_dir)
    merge_dir = os.path.join(config.path_results_root, config.test_results_dir,
                             config.merged_dir)

    if os.path.exists(out_dir_mask_norm):
        im_dir = out_dir_mask_norm
    else:
        if config.num_folds > 1:
            im_dir = merge_dir
        else:
            im_dir = folds_dir
            im_prefix = 'fold0_'

    os.makedirs(im_dir, exist_ok=True)

    # output files
    res_root_dir = os.path.join(config.path_results_root,
                                config.test_results_dir)
    outfile_csv = os.path.join(res_root_dir, config.wkt_submission)
    #outfile_gpickle = os.path.join(res_root_dir, 'G_sknw.gpickle')
    out_ske_dir = os.path.join(res_root_dir,
                               config.skeleton_dir)  # set to '' to not save
    os.makedirs(out_ske_dir, exist_ok=True)
    if len(config.skeleton_pkl_dir) > 0:
        out_gdir = os.path.join(
            res_root_dir, config.skeleton_pkl_dir)  # set to '' to not save
        os.makedirs(out_gdir, exist_ok=True)
    else:
        out_gdir = ''

    print("im_dir:", im_dir)
    print("out_ske_dir:", out_ske_dir)
    print("out_gdir:", out_gdir)

    thresh = config.skeleton_thresh
    #    # thresholds for each aoi
    #    threshes={'2': .3, '3': .3, '4': .3, '5': .2}
    #    thresh = threshes[config.aoi]
    min_subgraph_length_pix = config.min_subgraph_length_pix
    #min_subgraph_length_pix=200

    log_file = os.path.join(res_root_dir, 'skeleton.log')
    console, logger1 = make_logger.make_logger(log_file,
                                               logger_name='log',
                                               write_to_console=bool(
                                                   config.log_to_console))

    # print("Building wkts...")
    t0 = time.time()
    df = build_wkt_dir(im_dir,
                       outfile_csv,
                       out_ske_dir,
                       out_gdir,
                       thresh,
                       debug=debug,
                       add_small=add_small,
                       fix_borders=fix_borders,
                       img_shape=img_shape,
                       skel_replicate=skel_replicate,
                       skel_clip=skel_clip,
                       img_mult=img_mult,
                       hole_size=hole_size,
                       min_subgraph_length_pix=min_subgraph_length_pix,
                       min_spur_length_pix=min_spur_length_pix,
                       cv2_kernel_close=cv2_kernel_close,
                       cv2_kernel_open=cv2_kernel_open,
                       max_out_size=max_out_size,
                       skeleton_band=config.skeleton_band,
                       num_classes=config.num_classes,
                       im_prefix=im_prefix,
                       spacenet_naming_convention=spacenet_naming_convention,
                       use_medial_axis=use_medial_axis,
                       kernel_blur=kernel_blur,
                       min_background_frac=min_background_frac,
                       n_threads=n_threads,
                       verbose=verbose,
                       super_verbose=super_verbose)

    print("len df:", len(df))
    print("outfile:", outfile_csv)
    t1 = time.time()
    logger1.info("Total time to run build_wkt_dir: {} seconds".format(t1 - t0))
    print("Total time to run build_wkt_dir:", t1 - t0, "seconds")
Example #13
def main():
    '''See _arr_slicing_speed.ipynb for better tests'''
    global logger1

    import argparse
    import json

    parser = argparse.ArgumentParser()
    parser.add_argument('config_path')
    args = parser.parse_args()

    with open(args.config_path, 'r') as f:
        cfg = json.load(f)
        config = Config(**cfg)

    ##########
    # Variables
    t0 = time.time()
    percentile = 85
    dx, dy = 4, 4  # nearest neighbors patch size
    min_z = 128  # min z value to consider a hit
    N_plots = 20
    figsize = (12, 12)
    # alternate palettes, kept for reference; only the final assignment is used
    # node_color, edge_color = 'OrangeRed', '#ff571a'
    # node_color, edge_color = '#00e699', '#1affb2'  # aquamarine
    # node_color, edge_color = 'turquoise', '#65e6d9'  # turquoise
    # node_color, edge_color = 'deepskyblue', '#33ccff'
    # node_color, edge_color = '#e68a00', '#ffa31a'  # orange
    # node_color, edge_color = '#33ff99', 'SpringGreen'
    # node_color, edge_color = 'DarkViolet', 'BlueViolet'
    # node_color, edge_color = '#a446d2', 'BlueViolet'
    # node_color, edge_color = '#0086CC', '#00a6ff'  # cosmiq logo blue
    # node_color, edge_color = '#33cccc', '#47d1d1'  # turquoise
    # node_color, edge_color = '#ffd100', '#e09900'  # orange gold
    # node_color, edge_color = '#29a329', '#33cc33'  # green
    # node_color, edge_color = '#66ccff', '#999999'  # blue
    # node_color, edge_color = '#4dff4d', '#00e600'  # green

    # best colors
    node_color, edge_color = '#cc9900', '#ffbf00'  # gold

    default_node_size = 2  # 0.15 #4
    plot_width_key, plot_width_mult = 'inferred_speed_mph', 0.085  # 0.08  # variable width
    # width_key, width_mult = 4, 1   # constant width
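    # edge widths in the plots below scale as width_mult * edge[width_key], so
    # line thickness tracks the inferred speed (an assumption, per the
    # "variable width" note above)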
    use_totband = config.num_classes > 7

    save_shapefiles = True
    use_weighted_mean = True
    variable_edge_speed = False
    run_08a_plot_graph_plus_im = False
    verbose = False
    ##########

    # input dirs
    res_root_dir = os.path.join(config.path_results_root, config.test_results_dir)
    # path_images = os.path.join(config.path_data_root, config.test_data_refined_dir)
    graph_dir = os.path.join(res_root_dir, config.graph_dir)
    # get mask location; the large-image stitching check is disabled below,
    # so the fold masks directory is used directly
    folds_dir = os.path.join(config.path_results_root,
                             config.test_results_dir,
                             config.folds_save_dir)
    mask_prefix = ''
    mask_dir = folds_dir

    # if os.path.exists(out_dir_mask_norm):
    #    mask_dir = out_dir_mask_norm
    # else:
    #    mask_dir = merge_dir
    log_file = os.path.join(res_root_dir, 'skeleton_speed.log')
    console, logger1 = make_logger.make_logger(log_file, logger_name='log')

    # output dirs
    graph_speed_dir = os.path.join(res_root_dir, config.graph_dir + '_speed')
    os.makedirs(graph_speed_dir, exist_ok=True)
    logger1.info("graph_speed_dir: " + graph_speed_dir)

    speed_conversion_file_binned = 'speed_conversion_binned10.csv'

    # load the conversion file: maps binned pixel mask values to road speed (mph)
    conv_df, conv_dict = load_speed_conversion_dict_binned(
        speed_conversion_file_binned)
    logger1.info("speed conv_dict: " + str(conv_dict))

    # Add travel time to entire dir
    add_travel_time_dir(graph_dir, mask_dir, conv_dict, graph_speed_dir,
                        min_z=min_z,
                        dx=dx, dy=dy,
                        percentile=percentile,
                        use_totband=use_totband,
                        use_weighted_mean=use_weighted_mean,
                        variable_edge_speed=variable_edge_speed,
                        mask_prefix=mask_prefix,
                        save_shapefiles=save_shapefiles,
                        verbose=verbose)

    t1 = time.time()
    logger1.info("Time to execute add_travel_time_dir(): {x} seconds".format(x=t1 - t0))

    # plot a few
    if N_plots > 0:

        logger1.info("\nPlot a few...")
        ## import apls_tools (or just copy the plot_graph_on_im() func here)
        # local = False
        ## local
        # if local:
        #    apls_dir = '/raid/cosmiq/apls/apls/src'
        ## dev box
        # else:
        #    apls_dir = '/raid/local/src/apls/apls/src'
        # sys.path.append(apls_dir)
        # import apls_tools

        # define output dir
        graph_speed_plots_dir = os.path.join(res_root_dir, config.graph_dir + '_speed_plots')
        os.makedirs(graph_speed_plots_dir, exist_ok=True)

        # plot graph on image (with width proportional to speed)
        path_images = os.path.join(config.path_data_root, config.test_data_refined_dir)
        image_list = [z for z in os.listdir(path_images) if z.endswith('tif')]
        if len(image_list) > N_plots:
            image_names = np.random.choice(image_list, N_plots, replace=False)
        else:
            image_names = sorted(image_list)
        # logger1.info("image_names: " + image_names)

        for i, image_name in enumerate(image_names):
            if i > 1000:
                break

            image_path = os.path.join(path_images, image_name)
            logger1.info("\n\nPlotting: " + image_name + "  " + image_path)
            pkl_path = os.path.join(graph_speed_dir, image_name.split('.')[0] + '.gpickle')
            logger1.info("   pkl_path: " + pkl_path)
            if not os.path.exists(pkl_path):
                logger1.info("    missing pkl: " + pkl_path)
                continue
            G = nx.read_gpickle(pkl_path)
            # if not os.path.exists(image_path)

            figname = os.path.join(graph_speed_plots_dir, image_name)
            _ = plot_graph_on_im_yuge(G, image_path, figsize=figsize,
                                      show_endnodes=True,
                                      default_node_size=default_node_size,
                                      width_key=plot_width_key, width_mult=plot_width_mult,
                                      node_color=node_color, edge_color=edge_color,
                                      title=image_name, figname=figname,
                                      verbose=True, super_verbose=verbose)

    t2 = time.time()
    logger1.info("Time to execute add_travel_time_dir(): {x} seconds".format(x=t1 - t0))
    logger1.info("Time to make plots: {x} seconds".format(x=t2 - t1))
    logger1.info("Total time: {x} seconds".format(x=t2 - t0))