def test_image(image_info, disp_maps_dir, disp_map_count, disp_max_abs_value, batch_size, ds_fac_list,
               run_name_list, model_disp_max_abs_value, thresholds, test_output_dir):
    # --- Load data --- #
    ori_image, ori_metadata, ori_gt_polygons = read.load_gt_data(
        config_test.DATASET_RAW_DIR, image_info["city"], image_info["number"])
    image_name = read.IMAGE_NAME_FORMAT.format(city=image_info["city"], number=image_info["number"])

    # --- Load disp maps --- #
    disp_maps = load_disp_maps(disp_maps_dir, image_info, disp_map_count)

    test.test_image_with_gt_polygons_and_disp_maps(
        image_name, ori_image, ori_metadata, ori_gt_polygons, disp_maps, disp_max_abs_value,
        batch_size, ds_fac_list, run_name_list, model_disp_max_abs_value, thresholds,
        test_output_dir, output_shapefiles=config_test.OUTPUT_SHAPEFILES)

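# load_disp_maps() is referenced above but defined elsewhere in the codebase. Below is a minimal
# sketch of a plausible implementation, assuming the "{image_name}.disp_{i:02d}.disp_map.npy"
# filename convention used by generate_disp_maps() further down; the real function may differ.
import os
import numpy as np

def load_disp_maps(disp_maps_dir, image_info, disp_map_count):
    image_name = read.IMAGE_NAME_FORMAT.format(city=image_info["city"], number=image_info["number"])
    disp_maps = []
    for i in range(disp_map_count):
        # Assumed filename convention, mirrored from generate_disp_maps():
        filepath = os.path.join(disp_maps_dir, "{}.disp_{:02d}.disp_map.npy".format(image_name, i))
        disp_maps.append(np.load(filepath))
    return np.stack(disp_maps, axis=0)
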
def test_image(runs_dirpath, dataset_raw_dirpath, image_info, batch_size, ds_fac_list, run_name_list,
               model_disp_max_abs_value, output_dir, output_shapefiles):
    # --- Load data --- #
    # CHANGE the arguments of the load_gt_data() function if using your own and it does not take the same arguments:
    ori_image, ori_metadata, gt_polygons = read_bradbury_buildings.load_gt_data(
        dataset_raw_dirpath, image_info["city"], image_info["number"])
    # If an input polygon extension is specified, load those polygons instead of the default ones:
    if INPUT_POLYGONS_FILENAME_EXTENSION is not None:
        gt_polygons = read_bradbury_buildings.load_polygons(
            dataset_raw_dirpath, image_info["city"], image_info["number"], INPUT_POLYGONS_FILENAME_EXTENSION)

    if gt_polygons is not None:
        # CHANGE the arguments of the IMAGE_NAME_FORMAT format string if using your own and it does not take the same arguments:
        image_name = read_bradbury_buildings.IMAGE_NAME_FORMAT.format(
            city=image_info["city"], number=image_info["number"])
        print_utils.print_info("Processing image {}".format(image_name))

        aligned_gt_polygons = test.test_align_gt(
            runs_dirpath, ori_image, ori_metadata, gt_polygons, batch_size, ds_fac_list, run_name_list,
            model_disp_max_abs_value, output_dir, image_name, output_shapefiles=output_shapefiles)

        # Save aligned_gt_polygons in dataset dir:
        aligned_gt_polygons_filepath = read_bradbury_buildings.get_polygons_filepath(
            dataset_raw_dirpath, image_info["city"], image_info["number"], ALIGNED_GT_POLYGONS_FILENAME_EXTENSION)
        os.makedirs(os.path.dirname(aligned_gt_polygons_filepath), exist_ok=True)
        np.save(aligned_gt_polygons_filepath, aligned_gt_polygons)

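# Hypothetical usage sketch (illustration only, not from the original script): aligning the
# ground-truth polygons of one Bradbury buildings image with a coarse-to-fine cascade of trained
# alignment models. Every literal below (paths, run names, factors) is an assumption.
if __name__ == "__main__":
    test_image(
        runs_dirpath="runs",                                 # assumed directory holding trained runs
        dataset_raw_dirpath="data/bradbury_buildings/raw",   # assumed dataset location
        image_info={"city": "SanFrancisco", "number": 1},    # assumed city/number pair
        batch_size=12,
        ds_fac_list=[8, 4, 2, 1],                            # coarse-to-fine downsampling factors
        run_name_list=["ds_fac_8", "ds_fac_4", "ds_fac_2", "ds_fac_1"],
        model_disp_max_abs_value=4,
        output_dir="test/output",
        output_shapefiles=False)
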
def test_detect_new_buildings(image_info, batch_size, ds_fac_list, run_name_list,
                              model_disp_max_abs_value, thresholds, test_output_dir):
    # --- Load data --- #
    ori_image, ori_metadata, ori_gt_polygons = read.load_gt_data(
        config_test.DATASET_RAW_DIR, image_info["city"], image_info["number"])
    image_name = read.IMAGE_NAME_FORMAT.format(city=image_info["city"], number=image_info["number"])

    polygonization_params = {
        "fill_threshold": FILL_THRESHOLD,
        "outline_threshold": OUTLINE_THRESHOLD,
        "selem_width": SELEM_WIDTH,
        "iterations": ITERATIONS,
    }

    test.test_detect_new_buildings(
        image_name, ori_image, ori_metadata, ori_gt_polygons, batch_size, ds_fac_list, run_name_list,
        model_disp_max_abs_value, polygonization_params, thresholds, test_output_dir,
        output_shapefiles=config_test.OUTPUT_SHAPEFILES)

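# The module-level constants referenced by test_detect_new_buildings() parameterize the
# polygonization of the network's segmentation output. The values below are a hypothetical
# sketch for illustration only; the actual values live in the original module.
FILL_THRESHOLD = 0.5       # min. fill probability for a pixel to count as building interior (assumed)
OUTLINE_THRESHOLD = 0.05   # min. outline probability for a pixel to count as building edge (assumed)
SELEM_WIDTH = 3            # width of the structuring element for morphological cleanup (assumed)
ITERATIONS = 2             # number of morphological iterations (assumed)
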
def test_image(image_info, batch_size, ds_fac_list, run_name_list, model_disp_max_abs_value,
               thresholds, test_output_dir):
    # --- Load data --- #
    # CHANGE the arguments of the load_gt_data() function if using your own and it does not take the same arguments:
    ori_image, ori_metadata, ori_disp_polygons = read.load_gt_data(
        config_test.DATASET_RAW_DIR, image_info["city"], image_info["number"])
    # CHANGE the arguments of the IMAGE_NAME_FORMAT format string if using your own and it does not take the same arguments:
    image_name = read.IMAGE_NAME_FORMAT.format(city=image_info["city"], number=image_info["number"])

    # No ground-truth polygons are available for this image, so pass an empty list:
    ori_gt_polygons = []

    test.test(ori_image, ori_metadata, ori_gt_polygons, ori_disp_polygons, batch_size, ds_fac_list,
              run_name_list, model_disp_max_abs_value, thresholds, test_output_dir, image_name,
              output_shapefiles=config_test.OUTPUT_SHAPEFILES)

def test_image(runs_dirpath, dataset_raw_dirpath, image_info, disp_maps_dir, disp_map_count,
               disp_max_abs_value, batch_size, ds_fac_list, run_name_list, model_disp_max_abs_value,
               thresholds, test_output_dir, output_shapefiles):
    # --- Load data --- #
    ori_image, ori_metadata, ori_gt_polygons = read_inria.load_gt_data(
        dataset_raw_dirpath, image_info["city"], image_info["number"])
    image_name = read_inria.IMAGE_NAME_FORMAT.format(city=image_info["city"], number=image_info["number"])

    # --- Load disp maps --- #
    disp_maps = load_disp_maps(disp_maps_dir, image_info, disp_map_count)

    test.test_image_with_gt_polygons_and_disp_maps(
        runs_dirpath, image_name, ori_image, ori_metadata, ori_gt_polygons, disp_maps,
        disp_max_abs_value, batch_size, ds_fac_list, run_name_list, model_disp_max_abs_value,
        thresholds, test_output_dir, output_shapefiles=output_shapefiles)

def load_data(raw_dirpath, patch_info_list):
    image_list = []
    polygons_list = []
    for patch_info in patch_info_list:
        additional_args = {
            "overwrite_polygon_dir_name": POLYGON_DIRNAME,
        }
        image, metadata, polygons = read.load_gt_data(
            raw_dirpath, patch_info["city"], patch_info["number"], additional_args=additional_args)

        # Scale the bounding box into the coordinate frame of the (already rescaled) image:
        scaled_bbox = (np.array(patch_info["bbox"]) / patch_info["scale_factor"]).astype(int)
        p_image = image[scaled_bbox[0]:scaled_bbox[2], scaled_bbox[1]:scaled_bbox[3], :]
        image_list.append(p_image)

        p_polygons = polygon_utils.crop_polygons_to_patch_if_touch(polygons, scaled_bbox)
        polygons_list.append(p_polygons)

    return image_list, polygons_list

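# Minimal usage sketch under assumed values: loading two patches of the same tile. The bbox
# format is (row_min, col_min, row_max, col_max), matching the slicing above; the city name,
# directory, and scale factor are illustrative assumptions.
patch_info_list = [
    {"city": "bloomington", "number": 1, "bbox": [0, 0, 512, 512], "scale_factor": 2},
    {"city": "bloomington", "number": 1, "bbox": [512, 512, 1024, 1024], "scale_factor": 2},
]
# image_list, polygons_list = load_data("data/raw", patch_info_list)
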
def test_image(runs_dirpath, dataset_raw_dirpath, image_info, disp_maps_dir, disp_map_count,
               disp_max_abs_value, batch_size, ds_fac_list, run_name_list, model_disp_max_abs_value,
               thresholds, test_output_dir, output_shapefiles):
    # --- Load data --- #
    ori_image, ori_metadata, ori_gt_polygons = read_bradbury_buildings.load_gt_data(
        dataset_raw_dirpath, image_info["city"], image_info["number"])
    image_name = read_bradbury_buildings.IMAGE_NAME_FORMAT.format(
        city=image_info["city"], number=image_info["number"])

    # --- Randomly drop some polygons --- #
    if KEEP_PROB < 1:
        ori_gt_polygons = drop_items(ori_gt_polygons, KEEP_PROB)

    # --- Load disp maps --- #
    disp_maps = load_disp_maps(disp_maps_dir, image_info, disp_map_count)

    test.test_image_with_gt_polygons_and_disp_maps(
        runs_dirpath, image_name, ori_image, ori_metadata, ori_gt_polygons, disp_maps,
        disp_max_abs_value, batch_size, ds_fac_list, run_name_list, model_disp_max_abs_value,
        thresholds, test_output_dir, output_shapefiles=output_shapefiles)

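# drop_items() is referenced above but not defined in this excerpt. A minimal sketch of what it
# plausibly does, assuming each item is kept independently with probability keep_prob; the real
# helper may be implemented differently.
import random

def drop_items(items, keep_prob, seed=None):
    """Return a new list where each item is kept with probability keep_prob."""
    rng = random.Random(seed)
    return [item for item in items if rng.random() < keep_prob]
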
def generate_disp_maps(dataset_raw_dir, image_info, disp_map_params, thresholds, output_dir):
    print("Generating {} displacement maps for {} {}...".format(
        disp_map_params["disp_map_count"], image_info["city"], image_info["number"]))

    disp_map_filename_format = "{}.disp_{:02d}.disp_map.npy"
    accuracies_filename_format = "{}.disp_{:02d}.accuracy.npy"

    # --- Load data --- #
    ori_image, ori_metadata, ori_gt_polygons = read.load_gt_data(
        dataset_raw_dir, image_info["city"], image_info["number"])
    image_name = read.IMAGE_NAME_FORMAT.format(city=image_info["city"], number=image_info["number"])

    spatial_shape = ori_image.shape[:2]
    ori_normed_disp_field_maps = math_utils.create_displacement_field_maps(
        spatial_shape, disp_map_params["disp_map_count"], disp_map_params["disp_modes"],
        disp_map_params["disp_gauss_mu_range"], disp_map_params["disp_gauss_sig_scaling"])
    disp_polygons_list = test.generate_disp_data(
        ori_normed_disp_field_maps, ori_gt_polygons, disp_map_params["disp_max_abs_value"])

    # Save disp maps and accuracies individually
    for i, (ori_normed_disp_field_map, disp_polygons) in enumerate(
            zip(ori_normed_disp_field_maps, disp_polygons_list)):
        disp_map_filename = disp_map_filename_format.format(image_name, i)
        disp_map_filepath = os.path.join(output_dir, disp_map_filename)
        np.save(disp_map_filepath, ori_normed_disp_field_map)

        accuracies_filename = accuracies_filename_format.format(image_name, i)
        accuracies_filepath = os.path.join(output_dir, accuracies_filename)
        # Only keep integer (whole-pixel) thresholds for accuracy measurement:
        integer_thresholds = [threshold for threshold in thresholds if int(threshold) == threshold]
        accuracies = test.measure_accuracies(
            ori_gt_polygons, disp_polygons, integer_thresholds, accuracies_filepath)

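# Hypothetical call sketch: disp_map_params gathers the displacement-field generation parameters
# consumed above. All numbers are illustrative assumptions, not values from the repository.
disp_map_params = {
    "disp_map_count": 10,                    # random displacement maps to generate per image
    "disp_modes": 30,                        # number of Gaussian modes in each random field
    "disp_gauss_mu_range": [0, 1],           # range of the Gaussians' centers (normalized coords)
    "disp_gauss_sig_scaling": [0.0, 0.002],  # scaling range of the Gaussians' std deviations
    "disp_max_abs_value": 32,                # maximum absolute displacement in pixels
}
# generate_disp_maps("data/raw", {"city": "bloomington", "number": 1},
#                    disp_map_params, thresholds=[1, 2, 3, 4, 8, 16], output_dir="disp_maps")
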
def compute_grads(raw_dirpath, runs_dirpath, run_name, ds_fac, overwrite_config, tile_info_list,
                  polygon_dirname, output_dirname, output_filepath_format):
    # --- Params: setup run dir and load config file
    run_dir = run_utils.setup_run_dir(runs_dirpath, run_name)
    _, checkpoints_dir = run_utils.setup_run_subdirs(run_dir)
    config = run_utils.load_config(config_dirpath=run_dir)

    # --- Instantiate model
    output_res = model.MapAlignModel.get_output_res(
        overwrite_config["input_res"], config["pool_count"])
    map_align_model = model.MapAlignModel(
        config["model_name"], overwrite_config["input_res"],
        config["add_image_input"], config["image_channel_count"], config["image_feature_base_count"],
        config["add_poly_map_input"], config["poly_map_channel_count"], config["poly_map_feature_base_count"],
        config["common_feature_base_count"], config["pool_count"],
        config["add_disp_output"], config["disp_channel_count"],
        config["add_seg_output"], config["seg_channel_count"],
        output_res, overwrite_config["batch_size"],
        config["loss_params"], config["level_loss_coefs_params"],
        config["learning_rate_params"], config["weight_decay"],
        config["image_dynamic_range"], config["disp_map_dynamic_range_fac"], config["disp_max_abs_value"])
    map_align_model.setup_compute_grads()  # Add ops to compute gradients

    saver = tf.train.Saver(save_relative_paths=True)
    with tf.Session() as sess:
        # Restore checkpoint
        restore_checkpoint_success = map_align_model.restore_checkpoint(sess, saver, checkpoints_dir)
        if not restore_checkpoint_success:
            sys.exit('No checkpoint found in {}'.format(checkpoints_dir))

        # Compute patch count
        patch_total_count = 0
        for tile_info in tile_info_list:
            patch_total_count += len(tile_info["bbox_list"])

        pbar = tqdm(total=patch_total_count, desc="Computing patch gradients: ")
        for tile_info in tile_info_list:
            # --- Path setup: format with dummy values just to create the output directory
            unused_filepath = output_filepath_format.format(
                dir=raw_dirpath, fold=tile_info["fold"], out_dir=output_dirname, tile="",
                b0=0, b1=0, b2=0, b3=0, out_name="", ext="")
            os.makedirs(os.path.dirname(unused_filepath), exist_ok=True)
            tile_name = read.IMAGE_NAME_FORMAT.format(city=tile_info["city"], number=tile_info["number"])

            # Compute grads for that image
            additional_args = {
                "overwrite_polygon_dir_name": polygon_dirname,
            }
            image, metadata, polygons = read.load_gt_data(
                raw_dirpath, tile_info["city"], tile_info["number"], additional_args=additional_args)

            # Downsample
            image, polygons = process_utils.downsample_data(
                image, metadata, polygons, ds_fac, config["reference_pixel_size"])
            spatial_shape = image.shape[:2]

            # Draw polygon map
            polygon_map = polygon_utils.draw_polygon_map(
                polygons, spatial_shape, fill=True, edges=True, vertices=True)

            t_grads = 0
            t_save = 0
            for bbox in tile_info["bbox_list"]:
                p_im = image[bbox[0]:bbox[2], bbox[1]:bbox[3], :]
                p_polygon_map = polygon_map[bbox[0]:bbox[2], bbox[1]:bbox[3], :]

                # Grad compute
                t = time.perf_counter()
                grads = map_align_model.compute_grads(sess, p_im, p_polygon_map)
                t_grads += time.perf_counter() - t

                # Saving
                t = time.perf_counter()
                flattened_grads_x = get_flattened_gradients(grads["x"])
                flattened_grads_y = get_flattened_gradients(grads["y"])
                flattened_grads = np.stack([flattened_grads_x, flattened_grads_y], axis=-1)

                # Save grads
                grads_filepath = output_filepath_format.format(
                    dir=raw_dirpath, fold=tile_info["fold"], out_dir=output_dirname, tile=tile_name,
                    b0=bbox[0], b1=bbox[1], b2=bbox[2], b3=bbox[3], out_name="grads", ext="npy")
                np.save(grads_filepath, flattened_grads)
                t_save += time.perf_counter() - t

            pbar.update(len(tile_info["bbox_list"]))
            pbar.set_postfix(t_grads=t_grads, t_save=t_save)
        pbar.close()

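# Hypothetical usage sketch: overwrite_config overrides the run's saved input resolution and
# batch size for gradient computation (batch_size=1 since patches are fed one at a time above).
# All literals, including the filepath format, are illustrative assumptions.
overwrite_config = {
    "input_res": 220,   # assumed patch input resolution
    "batch_size": 1,
}
# compute_grads(raw_dirpath="data/raw", runs_dirpath="runs", run_name="ds_fac_1",
#               ds_fac=1, overwrite_config=overwrite_config, tile_info_list=tile_info_list,
#               polygon_dirname="gt_polygons", output_dirname="grads",
#               output_filepath_format="{dir}/{fold}/{out_dir}/{tile}.{b0}_{b1}_{b2}_{b3}.{out_name}.{ext}")
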
def process_image(dataset_raw_dirpath, image_info, patch_stride, patch_res, downsampling_factors,
                  disp_max_abs_value, include_polygons, downsampling_factor_writers):
    """
    Writes to all the writers (one for each resolution) all sample patches extracted from the image_info.

    :param dataset_raw_dirpath:
    :param image_info:
    :param patch_stride:
    :param patch_res:
    :param downsampling_factors:
    :param disp_max_abs_value:
    :param include_polygons:
    :param downsampling_factor_writers:
    :return: True if the image was processed, False if no ground-truth polygons were found.
    """
    ori_image, ori_metadata, ori_gt_polygons = read.load_gt_data(
        dataset_raw_dirpath, image_info["city"], image_info["number"])
    if ori_gt_polygons is None:
        return False

    ori_gt_polygons = polygon_utils.polygons_remove_holes(ori_gt_polygons)  # TODO: Remove

    # Remove redundant vertices
    ori_gt_polygons = polygon_utils.simplify_polygons(ori_gt_polygons, tolerance=1)

    # Create displacement maps
    ori_normed_disp_field_maps = math_utils.create_displacement_field_maps(
        ori_image.shape[:2], config.DISP_MAP_COUNT, config.DISP_MODES,
        config.DISP_GAUSS_MU_RANGE, config.DISP_GAUSS_SIG_SCALING)

    for index, downsampling_factor in enumerate(downsampling_factors):
        print("downsampling_factor: {}".format(downsampling_factor))

        # Downsample ground-truth
        image, gt_polygons, normed_disp_field_maps = downsample_gt_data(
            ori_image, ori_metadata, ori_gt_polygons, ori_normed_disp_field_maps, downsampling_factor)
        spatial_shape = image.shape[:2]

        # Draw gt polygon map
        gt_polygon_map = polygon_utils.draw_polygon_map(
            gt_polygons, spatial_shape, fill=True, edges=True, vertices=True)

        # Generate final displacement
        disp_polygons_list, disp_polygon_maps = generate_disp_data(
            normed_disp_field_maps, gt_polygons, disp_max_abs_value, spatial_shape)

        # Compress data
        gt_polygons = [polygon.astype(np.float16) for polygon in gt_polygons]
        disp_polygons_list = [[polygon.astype(np.float16) for polygon in polygons]
                              for polygons in disp_polygons_list]
        disp_field_maps = normed_disp_field_maps * 32767  # int16 max value = 32767
        disp_field_maps = np.round(disp_field_maps)
        disp_field_maps = disp_field_maps.astype(np.int16)

        # Cut sample into patches
        if include_polygons:
            patches = process_sample_into_patches(
                patch_stride, patch_res, image, gt_polygon_map, disp_field_maps, disp_polygon_maps,
                gt_polygons, disp_polygons_list)
        else:
            patches = process_sample_into_patches(
                patch_stride, patch_res, image, gt_polygon_map, disp_field_maps, disp_polygon_maps)

        for patch in patches:
            save_patch_to_tfrecord(patch, downsampling_factor_writers[downsampling_factor])

    return True

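# Hypothetical driver sketch: process_image() expects one TFRecord writer per downsampling
# factor. A minimal setup under assumed names, paths, and parameter values, using the TF1-style
# API consistent with the rest of this codebase:
downsampling_factors = [1, 2, 4, 8]
downsampling_factor_writers = {
    ds_fac: tf.python_io.TFRecordWriter("patches/ds_fac_{:02d}.tfrecord".format(ds_fac))
    for ds_fac in downsampling_factors
}
success = process_image(dataset_raw_dirpath="data/raw",                  # assumed path
                        image_info={"city": "bloomington", "number": 1},  # assumed image
                        patch_stride=76, patch_res=220,                   # assumed patch geometry
                        downsampling_factors=downsampling_factors,
                        disp_max_abs_value=4,
                        include_polygons=False,
                        downsampling_factor_writers=downsampling_factor_writers)
for writer in downsampling_factor_writers.values():
    writer.close()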