def homographic_augmentation(data, add_homography=False, **config):
    """Warp a data sample with a randomly sampled homography.

    Args:
        data: dict with at least 'image' and 'keypoints' entries.
        add_homography: if True, the sampled (flattened) homography is
            stored under the 'homography' key of the result.
        **config: must provide 'params' (kwargs for `sample_homography`)
            and 'valid_border_margin' (border erosion for the valid mask).

    Returns:
        A new dict: a copy of `data` with the warped image, the warped and
        in-bounds-filtered keypoints, and a 'valid_mask' marking pixels that
        originate from inside the source image.
    """
    with tf.name_scope('homographic_augmentation'):
        shape = tf.shape(data['image'])[:2]
        H = sample_homography(shape, **config['params'])[0]

        # Warp the image and propagate the keypoints through the same H.
        warped = tf.contrib.image.transform(data['image'], H,
                                            interpolation='BILINEAR')
        mask = compute_valid_mask(shape, H, config['valid_border_margin'])
        points = filter_points(warp_points(data['keypoints'], H), shape)

        out = dict(data)
        out['image'] = warped
        out['keypoints'] = points
        out['valid_mask'] = mask
        if add_homography:
            out['homography'] = H
    return out
def _warp_image(image):
    """Warp `image` with a freshly sampled homography.

    Returns a dict holding the bilinear-warped image ('warped_im') and the
    homography that produced it ('H').
    """
    homography = sample_homography(tf.shape(image)[:2])
    warped = tf.contrib.image.transform(image, homography,
                                        interpolation="BILINEAR")
    return {'warped_im': warped, 'H': homography}
base_path = Path(DATA_PATH, 'COCO/val2014/') image_paths = list(base_path.iterdir()) output_dir = Path(DATA_PATH, 'COCO/patches/') if not output_dir.exists(): os.makedirs(output_dir) # Create the ops to warp an image tf_path = tf.placeholder(tf.string) # Read the image image = tf.read_file(tf_path) image = tf.image.decode_jpeg(image, channels=3) image = _preprocess(image) shape = tf.shape(image)[:2] # Warp the image H = sample_homography(tf.shape(image)[:2], **config['homographies']) warped_image = tf.contrib.image.transform(image, H, interpolation="BILINEAR") patch_ratio = config['homographies']['patch_ratio'] new_shape = tf.multiply(tf.cast(shape, tf.float32), patch_ratio) new_shape = tf.cast(new_shape, tf.int32) warped_image = tf.image.resize_images(warped_image, new_shape) H = invert_homography(H) H = flat2mat(H)[0, :, :] print("Generating patches of Coco val...") sess = tf.InteractiveSession() for num, path in enumerate(image_paths): new_path = Path(output_dir, str(num)) if not new_path.exists():
def step(i, probs, counts, images):
    """One iteration of homography/distortion adaptation (tf.while_loop body).

    Samples a random homography plus a radial distortion, warps the image,
    runs the detector on the warped view, projects the detection map back to
    the original frame, and appends the result to the running accumulators.

    NOTE(review): relies on names closed over from the enclosing scope —
    `shape`, `image`, `config`, `net`, `H_transform`, `distort`, `undistort`,
    `sample_homography`, `invert_homography`, `cv` — none defined here.

    Args:
        i: loop counter.
        probs: accumulated back-projected probability maps (stacked on axis -1).
        counts: accumulated visibility counts (stacked on axis -1).
        images: accumulated warped images (stacked on axis -1).

    Returns:
        (i + 1, probs, counts, images) with one new slice appended to each
        accumulator.
    """
    # Sample image patch
    H = sample_homography(shape, **config['homographies'])
    H_inv = invert_homography(H)
    #############################################
    # Random distortion center inside the image; H_/W are the image
    # height/width (H_ named to avoid clashing with the homography H).
    H_ = shape[0]
    W = shape[1]
    row_c = tf.random_uniform(shape=[], minval=0,
                              maxval=tf.cast(H_, tf.float32), dtype=tf.float32)
    col_c = tf.random_uniform(shape=[], minval=0,
                              maxval=tf.cast(W, tf.float32), dtype=tf.float32)
    # Fixed radial-distortion coefficient (hard-coded, not read from config).
    lambda_ = tf.constant(0.000006)
    #############################################
    # apply the homography
    warped = H_transform(image, H, interpolation='BILINEAR')
    #############################################
    # apply the radial distortion
    warped = distort(warped, lambda_, (row_c, col_c))
    # Back-project an all-ones map to count, per pixel of the original frame,
    # whether it was visible in the warped view (undistort then un-warp).
    #count = warp_points_dist(tf.expand_dims(tf.ones(tf.shape(image)[:3]),-1), lambda_, (row_c,col_c), inverse=True)
    count = undistort(tf.expand_dims(tf.ones(tf.shape(image)[:3]), -1),
                      lambda_, (row_c, col_c))
    #count = tf.round(count)
    count = H_transform(count, H_inv, interpolation='NEAREST')
    # Forward mask: valid region of the warped+distorted view.
    mask = H_transform(tf.expand_dims(tf.ones(tf.shape(image)[:3]), -1),
                       H, interpolation='NEAREST')
    mask = distort(mask, lambda_, (row_c, col_c))
    #############################################
    # Ignore the detections too close to the border to avoid artifacts
    if config['valid_border_margin']:
        kernel = cv.getStructuringElement(
            cv.MORPH_ELLIPSE, (config['valid_border_margin'] * 2, ) * 2)
        # erosion2d has no GPU kernel in TF1, hence the explicit CPU pin.
        # `+ 1.` compensates erosion2d's convention of subtracting 1.
        with tf.device('/cpu:0'):
            count = tf.nn.erosion2d(
                count, tf.to_float(tf.constant(kernel)[..., tf.newaxis]),
                [1, 1, 1, 1], [1, 1, 1, 1], 'SAME')[..., 0] + 1.
            mask = tf.nn.erosion2d(
                mask, tf.to_float(tf.constant(kernel)[..., tf.newaxis]),
                [1, 1, 1, 1], [1, 1, 1, 1], 'SAME')[..., 0] + 1.
    # Predict detection probabilities
    prob = net(warped)['prob']
    prob = prob * mask
    # Project the probability map back to the original image frame
    # (inverse distortion, then inverse homography), weighted by visibility.
    prob_proj = undistort(tf.expand_dims(prob, -1), lambda_, (row_c, col_c))
    prob_proj = H_transform(prob_proj, H_inv, interpolation='BILINEAR')[..., 0]
    prob_proj = prob_proj * count
    # Append this round's results along a new trailing axis.
    probs = tf.concat([probs, tf.expand_dims(prob_proj, -1)], axis=-1)
    counts = tf.concat([counts, tf.expand_dims(count, -1)], axis=-1)
    images = tf.concat([images, tf.expand_dims(warped, -1)], axis=-1)
    return i + 1, probs, counts, images