Code example #1
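# Input pipeline: a one-shot iterator over the 'export' split of the COCO data generator (TF 1.x idiom).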
export_gen = data_gen_coco(files,
                           'export',
                           batch_size=config['export_batch_size'],
                           norm=config['normalize'],
                           **config)
data = export_gen.make_one_shot_iterator().get_next()

H = tf.constant(config['preprocessing']['resize'][0])
W = tf.constant(config['preprocessing']['resize'][1])
margin = tf.constant(config['homography_adaptation']['valid_border_margin'])

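# Aggregate the detector's probability map over sampled homographies (homography adaptation),
# then apply box NMS to each image in the batch.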
outputs = homography_adaptation(data['image'], model,
                                config['homography_adaptation'])
prob = tf.map_fn(
    lambda p: box_nms(p,
                      config['nms'],
                      min_prob=config['detection_threshold'],
                      keep_top_k=config['top_k']), outputs['prob'])
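# Suppress detections within `margin` pixels of the border: crop the interior, then zero-pad back to (H, W).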
prob = tf.image.crop_to_bounding_box(prob[..., tf.newaxis], margin, margin,
                                     H - 2 * margin, W - 2 * margin)
prob = tf.squeeze(tf.image.pad_to_bounding_box(prob, margin, margin, H, W),
                  axis=-1)

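# Binarize the probability map at the detection threshold.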
prediction = tf.cast(tf.greater_equal(prob, config['detection_threshold']),
                     dtype=tf.int32)

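# Freeze the graph so no new ops can be added while the export loop runs.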
tf.keras.backend.get_session().graph.finalize()

while True:
    try:
        image, name, pred = tf.keras.backend.get_session().run(
            [data['image'], data['name'], prediction])
Code example #2
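# Mask out a margin-wide border of both probability maps by cropping the interior
# and zero-padding back to the original size.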
probability1 = tf.image.crop_to_bounding_box(probability1[..., tf.newaxis],
                                             margin, margin, H1 - 2 * margin,
                                             W1 - 2 * margin)
probability1 = tf.squeeze(tf.image.pad_to_bounding_box(probability1, margin,
                                                       margin, H1, W1),
                          axis=-1)
probability2 = tf.image.crop_to_bounding_box(probability2[..., tf.newaxis],
                                             margin, margin, H2 - 2 * margin,
                                             W2 - 2 * margin)
probability2 = tf.squeeze(tf.image.pad_to_bounding_box(probability2, margin,
                                                       margin, H2, W2),
                          axis=-1)

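# Box NMS on each probability map, keeping at most `top_k` detections above the detection threshold.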
probability1 = tf.map_fn(
    lambda p: box_nms(p,
                      config['nms'],
                      min_prob=config['detection_threshold'],
                      keep_top_k=config['top_k']), probability1)
probability2 = tf.map_fn(
    lambda p: box_nms(p,
                      config['nms'],
                      min_prob=config['detection_threshold'],
                      keep_top_k=config['top_k']), probability2)

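# Split the network output into two descriptor blocks, upsample each bicubically by the grid size,
# and L2-normalize along the channel axis.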
descriptor1 = tf.image.resize_bicubic(
    outputs[..., 130:386],
    config['grid_size'] * tf.shape(outputs[..., 130:386])[1:3])
descriptor1 = tf.nn.l2_normalize(descriptor1, axis=-1)
descriptor2 = tf.image.resize_bicubic(
    outputs[..., 386:],
    config['grid_size'] * tf.shape(outputs[..., 386:])[1:3])
descriptor2 = tf.nn.l2_normalize(descriptor2, axis=-1)
Code example #3
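# One-shot iterator over HPatches pairs (image, warped_image, homography).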
export_gen = data_gen_hpatches(files, norm=config['normalize'], **config)
dataset = export_gen.make_one_shot_iterator().get_next()

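# The classical (non-learned) detector is wrapped in tf.py_func so it runs as a Python op on the CPU,
# mapped over each image in the batch.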
with tf.device('/cpu:0'):
    probability1 = tf.map_fn(
        lambda i: tf.py_func(lambda x: classical_detector(x, **config),
                             [i], tf.float32),
        dataset['image'])
    probability2 = tf.map_fn(
        lambda i: tf.py_func(lambda x: classical_detector(x, **config),
                             [i], tf.float32),
        dataset['warped_image'])
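# When the 'events' option is set, mask the warped-image probabilities with the homography's valid-pixel mask.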
if config['events']:
    probability2 = probability2 * dataset['homography']['mask']

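# Optionally apply box NMS to both probability maps.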
if config['nms']:
    probability1 = tf.map_fn(
        lambda p: box_nms(
            p, config['nms'], min_prob=0., keep_top_k=config['top_k']),
        probability1)
    probability2 = tf.map_fn(
        lambda p: box_nms(
            p, config['nms'], min_prob=0., keep_top_k=config['top_k']),
        probability2)

tf.keras.backend.get_session().graph.finalize()

i = 0
while True:
    try:
        if not config['events']:
            prob1, prob2, image, warped_image, homography = \
                tf.keras.backend.get_session().run([
                    probability1, probability2, dataset['image'],
                    dataset['warped_image'], dataset['homography']])
Code example #4
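# Progress bar; a non-positive eval_iter means iterating until the dataset is exhausted.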
pbar = tqdm(total=config['eval_iter'] if config['eval_iter'] > 0 else None)

export_gen = data_gen_hpatches(files, norm=config['normalize'], **config)
dataset = export_gen.make_one_shot_iterator().get_next()

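# Classical detector and descriptor, evaluated on CPU via tf.py_func for both the reference and the warped image.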
with tf.device('/cpu:0'):
    probability1, descriptor1 = tf.map_fn(
        lambda i: tf.py_func(
            lambda x: classical_detector_descriptor(x, **config),
            [i], (tf.float32, tf.float32)),
        dataset['image'], dtype=[tf.float32, tf.float32])
    probability2, descriptor2 = tf.map_fn(
        lambda i: tf.py_func(
            lambda x: classical_detector_descriptor(x, **config),
            [i], (tf.float32, tf.float32)),
        dataset['warped_image'], dtype=[tf.float32, tf.float32])

if config['events']:
    probability2 = probability2 * dataset['homography']['mask']

if config['nms']:
    probability1 = tf.map_fn(lambda p: box_nms(p, config['nms'], min_prob=0., keep_top_k=config['top_k']), probability1)
    probability2 = tf.map_fn(lambda p: box_nms(p, config['nms'], min_prob=0., keep_top_k=config['top_k']), probability2)

tf.keras.backend.get_session().graph.finalize()

i = 0
while True:
    try:
        if not config['events']:
            prob1, prob2, desc1, desc2, image, warped_image, homography =\
                tf.keras.backend.get_session().run([probability1, probability2, descriptor1, descriptor2,
                                                    dataset['image'], dataset['warped_image'], dataset['homography']])
        else:
            prob1, prob2, desc1, desc2, image, warped_image, homography = \
                tf.keras.backend.get_session().run([probability1, probability2, descriptor1, descriptor2,
                                                    dataset['image'], dataset['warped_image'],