Example #1
0
    def setUp(self):
        """Instantiate the manifolds/groups under test and open a figure.

        The attributes set here are consumed by test methods of the
        enclosing class (not visible in this snippet).
        """
        self.n_samples = 10
        self.SO3_GROUP = SpecialOrthogonalGroup(n=3)
        self.SE3_GROUP = SpecialEuclideanGroup(n=3)
        self.S1 = Hypersphere(dimension=1)
        self.S2 = Hypersphere(dimension=2)
        self.H2 = HyperbolicSpace(dimension=2)

        # Fresh matplotlib figure per test, so plots do not accumulate
        # across tests (presumably with a non-interactive backend —
        # TODO confirm against the module header).
        plt.figure()
Example #2
0
class TestVisualizationMethods(unittest.TestCase):
    """Smoke tests for the visualization module.

    Each test draws random points on a space and plots them; the test
    passes as long as plotting does not raise.
    """

    _multiprocess_can_split_ = True

    def setUp(self):
        self.n_samples = 10
        self.SO3_GROUP = SpecialOrthogonalGroup(n=3)
        self.SE3_GROUP = SpecialEuclideanGroup(n=3)
        self.S2 = Hypersphere(dimension=2)
        self.H2 = HyperbolicSpace(dimension=2)

    def _sample_and_plot(self, manifold, space_name):
        # Shared helper: draw n_samples uniform points and plot them
        # in the named space.
        sampled = manifold.random_uniform(self.n_samples)
        visualization.plot(sampled, space=space_name)

    def test_plot_points_so3(self):
        self._sample_and_plot(self.SO3_GROUP, 'SO3_GROUP')

    def test_plot_points_se3(self):
        self._sample_and_plot(self.SE3_GROUP, 'SE3_GROUP')

    def test_plot_points_s2(self):
        self._sample_and_plot(self.S2, 'S2')

    def test_plot_points_h2_poincare_disk(self):
        self._sample_and_plot(self.H2, 'H2_poincare_disk')

    def test_plot_points_h2_poincare_half_plane(self):
        self._sample_and_plot(self.H2, 'H2_poincare_half_plane')

    def test_plot_points_h2_klein_disk(self):
        self._sample_and_plot(self.H2, 'H2_klein_disk')
Example #3
0
    def setUp(self):
        """Build SE(3) invariant-metric fixtures shared by the tests.

        Creates four invariant metrics on SE(3) (diagonal/general x
        left/right invariant) plus three test points, and stores them
        on ``self`` for the test methods of the enclosing class.
        """
        gs.random.seed(1234)  # deterministic fixtures across runs

        n = 3
        group = SpecialEuclideanGroup(n=n)

        # Diagonal left and right invariant metrics
        # NOTE(review): the in-place slice assignment below assumes a
        # numpy-like mutable backend for gs — TODO confirm it works with
        # all configured backends.
        diag_mat_at_identity = gs.zeros([group.dimension, group.dimension])
        diag_mat_at_identity[0:3, 0:3] = 1 * gs.eye(3)
        diag_mat_at_identity[3:6, 3:6] = 1 * gs.eye(3)

        left_diag_metric = InvariantMetric(
            group=group,
            inner_product_mat_at_identity=diag_mat_at_identity,
            left_or_right='left')
        right_diag_metric = InvariantMetric(
            group=group,
            inner_product_mat_at_identity=diag_mat_at_identity,
            left_or_right='right')

        # General left and right invariant metrics
        # TODO(xxx): replace by general SPD matrix
        sym_mat_at_identity = gs.eye(group.dimension)

        left_metric = InvariantMetric(
            group=group,
            inner_product_mat_at_identity=sym_mat_at_identity,
            left_or_right='left')

        right_metric = InvariantMetric(
            group=group,
            inner_product_mat_at_identity=sym_mat_at_identity,
            left_or_right='right')

        # NOTE(review): key naming is inconsistent ('left_diag' vs
        # 'right_diag_metric'); left as-is since tests elsewhere may
        # look these keys up by name.
        metrics = {
            'left_diag': left_diag_metric,
            'right_diag_metric': right_diag_metric,
            'left': left_metric,
            'right': right_metric
        }

        # General case for the point
        # Layout: 3 rotation-vector components followed by 3 translation
        # components — TODO confirm against SpecialEuclideanGroup docs.
        point_1 = gs.array([-0.2, 0.9, 0.5, 5., 5., 5.])
        point_2 = gs.array([0., 2., -0.1, 30., 400., 2.])
        # Edge case for the point, angle < epsilon,
        point_small = gs.array([-1e-7, 0., -7 * 1e-8, 6., 5., 9.])

        self.group = group
        self.metrics = metrics

        self.left_diag_metric = left_diag_metric
        self.right_diag_metric = right_diag_metric
        self.left_metric = left_metric
        self.right_metric = right_metric
        self.point_1 = point_1
        self.point_2 = point_2
        self.point_small = point_small
Example #4
0
    def setUp(self):
        """Build SE(3) invariant-metric fixtures shared by the tests.

        Variant of the sibling fixture: the diagonal metric matrix is
        the identity, and the test points are stored as 2-D arrays of
        shape [1, 6] rather than flat vectors.
        """
        # ImportWarning noise from dependencies is irrelevant to these tests.
        warnings.simplefilter('ignore', category=ImportWarning)

        gs.random.seed(1234)  # deterministic fixtures across runs

        n = 3
        group = SpecialEuclideanGroup(n=n)

        # Diagonal left and right invariant metrics
        diag_mat_at_identity = gs.eye(group.dimension)

        left_diag_metric = InvariantMetric(
            group=group,
            inner_product_mat_at_identity=diag_mat_at_identity,
            left_or_right='left')
        right_diag_metric = InvariantMetric(
            group=group,
            inner_product_mat_at_identity=diag_mat_at_identity,
            left_or_right='right')

        # General left and right invariant metrics
        # TODO(nina): Replace the matrix below by a general SPD matrix.
        sym_mat_at_identity = gs.eye(group.dimension)

        left_metric = InvariantMetric(
            group=group,
            inner_product_mat_at_identity=sym_mat_at_identity,
            left_or_right='left')

        right_metric = InvariantMetric(
            group=group,
            inner_product_mat_at_identity=sym_mat_at_identity,
            left_or_right='right')

        # NOTE(review): key naming is inconsistent ('left_diag' vs
        # 'right_diag_metric'); left as-is since tests elsewhere may
        # look these keys up by name.
        metrics = {
            'left_diag': left_diag_metric,
            'right_diag_metric': right_diag_metric,
            'left': left_metric,
            'right': right_metric
        }

        # General case for the point
        # Shape [1, 6]: rotation-vector part then translation part —
        # TODO confirm layout against SpecialEuclideanGroup docs.
        point_1 = gs.array([[-0.2, 0.9, 0.5, 5., 5., 5.]])
        point_2 = gs.array([[0., 2., -0.1, 30., 400., 2.]])
        # Edge case for the point, angle < epsilon,
        point_small = gs.array([[-1e-7, 0., -7 * 1e-8, 6., 5., 9.]])

        self.group = group
        self.metrics = metrics

        self.left_diag_metric = left_diag_metric
        self.right_diag_metric = right_diag_metric
        self.left_metric = left_metric
        self.right_metric = right_metric
        self.point_1 = point_1
        self.point_2 = point_2
        self.point_small = point_small
Example #5
0
    def __init__(self, weight):
        """Left-invariant SE(3) metric with a diagonal weight.

        Parameters
        ----------
        weight : array-like, shape [SE3_DIM]
            Diagonal weights applied to the inner product at the identity.
        """
        # Bug fix: the original check was `weight.shape != SE3_DIM`, which
        # compares a shape *tuple* against the dimension constant and is
        # therefore always True — the assertion could never fire.
        # Validate the intended shape instead (assumes SE3_DIM is the int
        # dimension, presumably 6 — message says "shape [6]").
        assert weight.shape == (SE3_DIM,), 'Weight vector must be of shape [6]'

        self.weight = weight
        self.SE3_GROUP = SpecialEuclideanGroup(N)
        self.metric = InvariantMetric(
            group=self.SE3_GROUP,
            inner_product_mat_at_identity=np.eye(SE3_DIM) * self.weight,
            left_or_right='left')
Example #6
0
    def define_loss(self):
        """Build the training loss graph.

        Combines a VAE-based tolerance regularizer (computed on resized
        camera + depth-map inputs) with an SE(3) geodesic loss between
        predicted and ground-truth pose parameters. Sets
        ``self.tolerance_regularization``, ``self.frozen_vars``,
        ``self.loss_geodesic`` and ``self.loss``.
        """
        with tf.name_scope('Loss'):
            print("Loss ===============================================Start")
            with tf.variable_scope('Resize_for_VAE'):
                # Channels 3:5 of the predicted depth map feed the VAE —
                # presumably the relevant 2-channel slice; TODO confirm
                # channel layout of y_hat_dm.
                y_hat_dm_resize = tf.image.resize_bilinear(
                    self.y_hat_dm[:, :, :, 3:5],
                    size=(self.hyper_params.vae_h, self.hyper_params.vae_w),
                    align_corners=False,
                    half_pixel_centers=False,
                    name='y_hat_dm_resize')
                y_cam_resize = tf.image.resize_bilinear(
                    self.x_cam_train,
                    size=(self.hyper_params.vae_h, self.hyper_params.vae_w),
                    align_corners=False,
                    half_pixel_centers=False,
                    name='y_cam_resize')
                # Channel-wise concat of camera and depth-map inputs.
                vae_inputs = tf.concat([y_cam_resize, y_hat_dm_resize],
                                       -1,
                                       name='vae_input')
                print("    [vae inputs]           {}".format(vae_inputs))

            self.tolerance_regularization, _ = tolerance_regularizer(
                inputs=vae_inputs,
                vae_latent_dim=self.hyper_params.vae_latent_dim,
                is_training=self.is_training)
            # Variables of the regularizer are collected so callers can
            # exclude them from optimization.
            self.frozen_vars = tf.get_collection(
                tf.GraphKeys.TRAINABLE_VARIABLES, 'Tolerance_Regularization')

            with tf.variable_scope('SE3_Loss'):
                print(
                    "    [SE3 Loss]           ##################################"
                )
                # TODO: Hard coded batch number!!! BAD!!!
                se3_pred = tf.reshape(self.y_hat_se3param,
                                      (self.batch_size, 6))
                se3_true = tf.reshape(self.y_se3param, (self.batch_size, 6))
                print("    [y_hat_se3param]           {}".format(
                    self.y_hat_se3param))
                print("    [y_se3param    ]           {}".format(
                    self.y_se3param))
                print("    [se3_pred     ]           {}".format(se3_pred))
                print("    [se3_true     ]           {}".format(se3_true))
                # Consider implementing a native one instead of using 3rd party lib
                # float32 eps keeps the group's small-angle branch stable.
                SE3_GROUP = SpecialEuclideanGroup(3,
                                                  epsilon=np.finfo(
                                                      np.float32).eps)
                metric = SE3_GROUP.left_canonical_metric
                self.loss_geodesic = tf.reduce_mean(
                    lie_group.loss(se3_pred, se3_true, SE3_GROUP, metric))

        # NOTE(review): this re-opens name_scope('Loss'); TF uniquifies the
        # second use to 'Loss_1' in the graph — confirm that is intended.
        with tf.name_scope('Loss'):
            self.loss = tf.identity(self.loss_geodesic +
                                    self.hyper_params.regularizer_factor *
                                    self.tolerance_regularization,
                                    name='loss')
Example #7
0
def test_se3():
    """Print SE(3) geodesic losses for hand-built poses.

    Pose convention: [r, p, y, x, y, z] — rotation vector followed by
    translation. rpy <--> xyz convention:
    http://web.mit.edu/2.05/www/Handout/HO2.PDF
    """
    # float32 eps keeps the group's small-angle branch stable.
    SE3_GROUP = SpecialEuclideanGroup(3, epsilon=np.finfo(np.float32).eps)
    metric = SE3_GROUP.left_canonical_metric

    identity = tf.constant(np.array([0., 0, 0, 0, 0, 0], dtype=np.float32), name='identity')
    r90 = tf.constant(np.array([1.5707963, 0., 0, 0, 0, 0], dtype=np.float32), name='r90')
    x10 = tf.constant(np.array([0., 0, 0, 10, 0, 0], dtype=np.float32), name='x10')
    r90_x10 = tf.constant(np.array([1.5707963, 0, 0, 10, 0, 0], dtype=np.float32), name='r90_x10')

    test_se3s = tf.constant(np.array([[1.5707963, 0, 0, 0, 0, 0], [0, 0, 0, 10, 0, 0], [1.5707963, 0, 0, 10, 0, 0]], dtype=np.float32), name='test_se3s')
    # Bug fix: this tensor was also named 'test_se3s' (copy-paste); it now
    # carries its own graph name.
    test_identities = tf.constant(np.array([[0., 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]], dtype=np.float32), name='test_identities')

    dist_r90_to_identity = lie_group.loss(r90, identity, SE3_GROUP, metric)
    dist_x10_to_identity = lie_group.loss(x10, identity, SE3_GROUP, metric)
    dist_x10_to_r90 = lie_group.loss(x10, r90, SE3_GROUP, metric)
    # Decoupled comparison value: sum of the two distances through identity.
    pseudo_riemannian_log_dist_x10_to_r90 = dist_r90_to_identity + dist_x10_to_identity
    dist_r90_to_x10 = lie_group.loss(r90, x10, SE3_GROUP, metric)
    dist_r90_x10_to_identity = lie_group.loss(r90_x10, identity, SE3_GROUP, metric)

    # Batched distances: one loss value per row.
    dist_batch = lie_group.loss(test_se3s, test_identities, SE3_GROUP, metric)
    mean_dist_batch = tf.reduce_mean(dist_batch)

    test_dict = {
        'dist_r90_to_identity': dist_r90_to_identity,
        'dist_x10_to_identity': dist_x10_to_identity,
        'dist_x10_to_r90': dist_x10_to_r90,
        'pseudo_riemannian_log_dist_x10_to_r90': pseudo_riemannian_log_dist_x10_to_r90,
        'dist_r90_to_x10': dist_r90_to_x10,
        'dist_r90_x10_to_identity': dist_r90_x10_to_identity,
        'dist_batch': dist_batch,
        'mean_dist_batch': mean_dist_batch
    }

    _compare_dict = get_compares_dict()

    # Add one distance-to-identity entry per externally supplied pose.
    for _k, _v in _compare_dict.items():
        test_dict['dist_{}_to_identity'.format(_k)] = lie_group.loss(tf.constant(_v['se3'], name=_k), identity, SE3_GROUP, metric)

    with tf.Session() as sess:
        for _k, _v in test_dict.items():
            print("{} : {}".format(_k, sess.run(_v)))
    # NOTE(review): a dangling `"""` followed the session block in the
    # original snippet and broke the syntax; it was removed.
Example #8
0
"""Unit tests for visualization module."""

import matplotlib
matplotlib.use('Agg')  # NOQA
import unittest

import geomstats.visualization as visualization
from geomstats.hypersphere import Hypersphere
from geomstats.special_euclidean_group import SpecialEuclideanGroup
from geomstats.special_orthogonal_group import SpecialOrthogonalGroup

# Module-level spaces shared by the test methods below.
SO3_GROUP = SpecialOrthogonalGroup(n=3)
SE3_GROUP = SpecialEuclideanGroup(n=3)
S2 = Hypersphere(dimension=2)

# TODO(nina): add tests for examples


class TestVisualizationMethods(unittest.TestCase):
    """Smoke tests: plotting random SO(3)/SE(3) samples must not raise."""

    _multiprocess_can_split_ = True

    def setUp(self):
        # Number of random points drawn by each test.
        self.n_samples = 10

    def test_plot_points_so3(self):
        sampled = SO3_GROUP.random_uniform(self.n_samples)
        visualization.plot(sampled, space='SO3_GROUP')

    def test_plot_points_se3(self):
        sampled = SE3_GROUP.random_uniform(self.n_samples)
        visualization.plot(sampled, space='SE3_GROUP')
Example #9
0
"""
Predict on SE3: losses.
"""
import numpy as np

import geomstats.lie_group as lie_group
from geomstats.special_euclidean_group import SpecialEuclideanGroup
from geomstats.special_orthogonal_group import SpecialOrthogonalGroup

# Module-level groups used as defaults and for quaternion conversion.
SE3 = SpecialEuclideanGroup(n=3)
SO3 = SpecialOrthogonalGroup(n=3)


def loss(y_pred,
         y_true,
         metric=SE3.left_canonical_metric,
         representation='vector'):
    """
    Loss function given by a riemannian metric on a Lie group,
    by default the left-invariant canonical metric.

    Parameters
    ----------
    y_pred, y_true : array-like
        Poses; 1-D inputs are promoted to a batch of size one.
    metric : RiemannianMetric
        Metric used to measure the discrepancy.
    representation : str
        'vector' (rotation-vector + translation) or 'quaternion'
        (4 quaternion components followed by translation).
    """
    # Promote single poses to batches of one.
    if y_pred.ndim == 1:
        y_pred = np.expand_dims(y_pred, axis=0)
    if y_true.ndim == 1:
        y_true = np.expand_dims(y_true, axis=0)

    if representation == 'quaternion':
        # Convert the leading 4 quaternion components to rotation vectors
        # and keep the trailing translation part unchanged.
        y_pred_rot_vec = SO3.rotation_vector_from_quaternion(y_pred[:, :4])
        y_pred = np.hstack([y_pred_rot_vec, y_pred[:, 4:]])
        y_true_rot_vec = SO3.rotation_vector_from_quaternion(y_true[:, :4])
        y_true = np.hstack([y_true_rot_vec, y_true[:, 4:]])
        # NOTE(review): the snippet appears truncated here — no value is
        # returned in the visible code; the metric-based loss computation
        # presumably follows.
Example #10
0
def main(args):
    """Run inference over the dataset and print the SE(3) geodesic loss.

    Restores the latest checkpoint from FLAGS.model_dir and iterates the
    input queue until exhaustion, printing predictions, ground truth and
    per-batch loss.
    """

    SE3_GROUP = SpecialEuclideanGroup(3)
    metric = SE3_GROUP.left_canonical_metric

    reader_train = PoseNetReader([FLAGS.dataset])

    # Get Input Tensors
    image, y_true = reader_train.read_and_decode()

    # Construct model and encapsulating all ops into scopes, making
    # Tensorboard's Graph visualization more convenient
    print('Making Model')
    with tf.name_scope('Model'):
        py_x, _ = inception.inception_v1(tf.cast(image, tf.float32),
                                         num_classes=6,
                                         is_training=False)
        # tanh(pred_angle) required to prevent infinite spins on rotation axis
        y_pred = tf.concat((tf.nn.tanh(py_x[:, :3]), py_x[:, 3:]), axis=1)
        loss = tf.reduce_mean(lie_group.loss(y_pred, y_true, SE3_GROUP,
                                             metric))

    print('Initializing Variables...')
    init_op = tf.group(tf.global_variables_initializer(),
                       tf.local_variables_initializer())

    # Main Testing Routine
    with tf.Session() as sess:
        # Run the initializer
        sess.run(init_op)

        # Start Queue Threads
        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(coord=coord)

        # Load saved weights
        print('Loading Trained Weights')
        saver = tf.train.Saver()
        latest_checkpoint = tf.train.latest_checkpoint(FLAGS.model_dir)
        saver.restore(sess, latest_checkpoint)

        i = 0

        # Inference cycle: runs until the input queue raises
        # OutOfRangeError (end of data).
        try:
            while True:
                _y_pred, _y_true, _loss = sess.run([y_pred, y_true, loss])
                print('Iteration:', i, 'loss:', _loss)
                print('_y_pred:', _y_pred)
                print('_y_true:', _y_true)
                print('\n')
                i = i + 1

        except tf.errors.OutOfRangeError:
            print('End of Testing Data')

        except KeyboardInterrupt:
            print('KeyboardInterrupt!')

        finally:
            # Always shut the queue runners down cleanly.
            print('Stopping Threads')
            coord.request_stop()
            coord.join(threads)
Example #11
0
def main(argv):
    """Evaluate a pose-regression model on one subject's scan.

    Depending on FLAGS.loss ('PoseNet', 'AP' or 'SE3'), restores the
    matching network head, predicts a pose per test frame, resamples the
    subject's NIfTI volume at the predicted pose and accumulates image
    similarity metrics (CC, MSE, PSNR, SSIM), printing their medians.
    """

    # TF Record
    datafiles = FLAGS.data_dir + '/test/' + FLAGS.subject_id + '.tfrecord'
    dataset = tf.data.TFRecordDataset(datafiles)
    dataset = dataset.map(_parse_function_ifind)
    # dataset = dataset.repeat()
    # dataset = dataset.shuffle(FLAGS.queue_buffer)
    dataset = dataset.batch(1)
    image, vec, qt, AP1, AP2, AP3 = dataset.make_one_shot_iterator().get_next()

    # Nifti Volume
    subject_path = FLAGS.scan_dir + '/test/' + FLAGS.subject_id + '.nii.gz'
    fixed_image_sitk_tmp = sitk.ReadImage(subject_path, sitk.sitkFloat32)
    fixed_image_sitk = sitk.GetImageFromArray(
        sitk.GetArrayFromImage(fixed_image_sitk_tmp))
    # Rescale intensities to [0, 255].
    fixed_image_sitk = sitk.RescaleIntensity(fixed_image_sitk, 0, 1) * 255.

    # Network Definition
    image_resized = tf.image.resize_images(image, size=[224, 224])

    # Measurements accumulated per frame across whichever branch runs.
    cc = []
    mse = []
    psnr = []
    ssim = []

    # NOTE(review): each branch below opens its own tf.Session without
    # closing it; acceptable for a one-shot script but worth confirming.
    if FLAGS.loss == 'PoseNet':

        # Quaternion (4) + translation (3) head.
        y_pred, _ = inception.inception_v3(image_resized,
                                           num_classes=7,
                                           is_training=False)
        quaternion_pred, translation_pred = tf.split(y_pred, [4, 3], axis=1)

        sess = tf.Session()

        ckpt_file = tf.train.latest_checkpoint(FLAGS.model_dir)
        tf.train.Saver().restore(sess, ckpt_file)
        print('restoring parameters from', ckpt_file)

        SO3_GROUP = SpecialOrthogonalGroup(3)

        for i in tqdm.tqdm(range(FLAGS.n_iter)):

            _image, _quaternion_true, _translation_true, _quaternion_pred, _translation_pred = \
                sess.run([image, qt, AP2, quaternion_pred, translation_pred])

            # Rotation matrix from the predicted quaternion; translation
            # rescaled by 60 — presumably mm scaling, TODO confirm.
            rx = SO3_GROUP.matrix_from_quaternion(_quaternion_pred)[0]
            tx = _translation_pred[0] * 60.

            image_true = np.squeeze(_image)
            image_pred = resample_sitk(fixed_image_sitk, rx, tx)

            imageio.imsave('imgdump/image_{}_true.png'.format(i),
                           np.uint8(_image[0, ...]))
            imageio.imsave('imgdump/image_{}_pred.png'.format(i),
                           np.uint8(image_pred))

            cc.append(calc_correlation(image_pred, image_true))
            mse.append(calc_mse(image_pred, image_true))
            psnr.append(calc_psnr(image_pred, image_true))
            ssim.append(calc_ssim(image_pred, image_true))

    elif FLAGS.loss == 'AP':

        # Three anchor points (3 coords each).
        y_pred, _ = inception.inception_v3(image_resized,
                                           num_classes=9,
                                           is_training=False)
        AP1_pred, AP2_pred, AP3_pred = tf.split(y_pred, 3, axis=1)

        sess = tf.Session()

        ckpt_file = tf.train.latest_checkpoint(FLAGS.model_dir)
        tf.train.Saver().restore(sess, ckpt_file)
        print('restoring parameters from', ckpt_file)

        for i in tqdm.tqdm(range(FLAGS.n_iter)):

            _image, _AP1, _AP2, _AP3, _AP1_pred, _AP2_pred, _AP3_pred = \
                sess.run([image, AP1, AP2, AP3, AP1_pred, AP2_pred, AP3_pred])

            # NOTE(review): these three distances are computed but never
            # used or reported in the visible code.
            dist_ap1 = np.linalg.norm(_AP1 - _AP1_pred)
            dist_ap2 = np.linalg.norm(_AP2 - _AP2_pred)
            dist_ap3 = np.linalg.norm(_AP3 - _AP3_pred)

            rx = matrix_from_anchor_points(_AP1_pred[0], _AP2_pred[0],
                                           _AP3_pred[0])
            tx = _AP2_pred[0] * 60.

            image_true = np.squeeze(_image)
            image_pred = resample_sitk(fixed_image_sitk, rx, tx)

            imageio.imsave('imgdump/image_{}_true.png'.format(i),
                           np.uint8(_image[0, ...]))
            imageio.imsave('imgdump/image_{}_pred.png'.format(i),
                           np.uint8(image_pred))

            cc.append(calc_correlation(image_pred, image_true))
            mse.append(calc_mse(image_pred, image_true))
            psnr.append(calc_psnr(image_pred, image_true))
            ssim.append(calc_ssim(image_pred, image_true))

    elif FLAGS.loss == 'SE3':

        # Rotation-vector (3) + translation (3) head.
        y_pred, _ = inception.inception_v3(image_resized,
                                           num_classes=6,
                                           is_training=False)

        sess = tf.Session()

        ckpt_file = tf.train.latest_checkpoint(FLAGS.model_dir)
        tf.train.Saver().restore(sess, ckpt_file)
        print('restoring parameters from', ckpt_file)

        SO3_GROUP = SpecialOrthogonalGroup(3)
        SE3_GROUP = SpecialEuclideanGroup(3)
        _se3_err_i = []

        for i in tqdm.tqdm(range(FLAGS.n_iter)):

            _image, _rvec, _tvec, _y_pred = \
                sess.run([image, vec, AP2, y_pred])

            rx = SO3_GROUP.matrix_from_rotation_vector(_y_pred[0, :3])[0]
            tx = _y_pred[0, 3:] * 60.

            image_true = np.squeeze(_image)
            image_pred = resample_sitk(fixed_image_sitk, rx, tx)

            imageio.imsave('imgdump/image_{}_true.png'.format(i),
                           np.uint8(_image[0, ...]))
            imageio.imsave('imgdump/image_{}_pred.png'.format(i),
                           np.uint8(image_pred))

            cc.append(calc_correlation(image_pred, image_true))
            mse.append(calc_mse(image_pred, image_true))
            psnr.append(calc_psnr(image_pred, image_true))
            ssim.append(calc_ssim(image_pred, image_true))

            # Group-theoretic pose error: inverse(true) composed with pred.
            _y_true = np.concatenate((_rvec, _tvec), axis=-1)
            _se3_err_i.append(
                SE3_GROUP.compose(SE3_GROUP.inverse(_y_true), _y_pred))

        # Per-coordinate error weights from the inverse error covariance,
        # normalized to unit norm.
        err_vec = np.vstack(_se3_err_i)
        err_weights = np.diag(np.linalg.inv(np.cov(err_vec.T)))
        err_weights = err_weights / np.linalg.norm(err_weights)
        print(err_weights)

    else:
        print('Invalid Option:', FLAGS.loss)
        raise SystemExit

    cc = np.stack(cc)
    mse = np.stack(mse)
    psnr = np.stack(psnr)
    ssim = np.stack(ssim)

    print('CC:', np.median(cc))
    print('MSE:', np.median(mse))
    print('PSNR:', np.median(psnr))
    print('SSIM:', np.median(ssim))
def main(args):
    """Train the pose network with an SE(3) geodesic loss.

    Builds the model, optimizes with Adam, writes TensorBoard summaries,
    snapshots checkpoints every FLAGS.snapshot iterations, and saves a
    final checkpoint on shutdown.
    """

    SE3_GROUP = SpecialEuclideanGroup(3, epsilon=FLAGS.epsilon)
    metric = SE3_GROUP.left_canonical_metric

    reader_train = PoseNetReader([FLAGS.dataset])

    # Get Input Tensors
    image, y_true = reader_train.read_and_decode(FLAGS.batch_size)

    # Construct model and encapsulating all ops into scopes, making
    # Tensorboard's Graph visualization more convenient
    print('Making Model')
    with tf.name_scope('Model'):
        py_x, _ = inception.inception_v1(tf.cast(image, tf.float32), num_classes=6)
        # tanh(pred_angle) required to prevent infinite spins on rotation axis
        y_pred = tf.concat((tf.nn.tanh(py_x[:, :3]), py_x[:, 3:]), axis=1)
        loss = tf.reduce_mean(lie_group.loss(y_pred, y_true, SE3_GROUP, metric))

    print('Making Optimizer')
    with tf.name_scope('Adam'):
        # Adam Optimizer
        train_op = tf.train.AdamOptimizer(FLAGS.init_lr).minimize(loss)

    # Initialize the variables (i.e. assign their default value)
    print('Initializing Variables...')
    init_op = tf.group(tf.global_variables_initializer(),
                       tf.local_variables_initializer())

    # Create a summary to monitor cost tensor
    tf.summary.scalar('loss', loss)

    # Merge all summaries into a single op
    merged_summary_op = tf.summary.merge_all()

    # Main Training Routine
    with tf.Session() as sess:
        # Run the initializer
        sess.run(init_op)

        # Start Queue Threads
        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(coord=coord)

        # op to write logs to Tensorboard
        summary_writer = tf.summary.FileWriter(FLAGS.logs_path, graph=tf.get_default_graph())

        saver = tf.train.Saver()
        if FLAGS.resume:
            print('Resuming training.')
            latest_checkpoint = tf.train.latest_checkpoint(FLAGS.model_dir)
            saver.restore(sess, latest_checkpoint)

        # Training cycle
        try:
            train_range = tqdm(range(FLAGS.max_iter))
            for i in train_range:

                _, _cost, summary = sess.run([train_op, loss, merged_summary_op])

                # Write logs at every iteration
                train_range.set_description('Training: (loss=%g)' % _cost)
                summary_writer.add_summary(summary, i)

                # Periodic checkpoint.
                # NOTE(review): save_path is assigned but unused here.
                if i % FLAGS.snapshot == 0:
                    save_path = saver.save(sess, '{}/chkpt{}.ckpt'.format(FLAGS.model_dir, i))

        except KeyboardInterrupt:
            print('KeyboardInterrupt!')

        finally:
            # Always stop queue runners and save a final checkpoint.
            # NOTE(review): `i` is undefined here if an exception fires
            # before the first loop iteration — confirm acceptable.
            print('Stopping Threads')
            coord.request_stop()
            coord.join(threads)
            print('Saving iter: ', i)
            save_path = saver.save(sess, FLAGS.model_dir + str(i) + '.ckpt')
Example #13
0
    def process(self, config_fp, model_name, res_fp):
        """Evaluate a frozen model on its test TFRecords and dump metrics.

        Parameters
        ----------
        config_fp : str
            Config YAML path, relative to this file's directory.
        model_name : str
            Key into the config selecting the model section.
        res_fp : str
            Output directory for the .npy result arrays (created if absent).
        """
        # Loading Config
        config_abs_fp = os.path.join(os.path.dirname(__file__), config_fp)
        # Fix: close the config file instead of leaking the handle.
        # SECURITY NOTE: bare yaml.load can construct arbitrary Python
        # objects from tagged input; prefer yaml.safe_load if the config
        # format allows it.
        with open(config_abs_fp, 'r') as config_file:
            config = Box(yaml.load(config_file.read()))
        config = config[model_name]
        print("Evaluating {}.".format(model_name))
        print("Config is  {}".format(config))

        # Loading Inference Server
        inference_server = PBServer(config=config)
        with inference_server.graph.as_default():
            # Loading TFRecords
            batch_iterator, batch_init_op = get_iterator_from_tfrecords(
                config=config, test=True)
            batch_next_op = batch_iterator.get_next()
        assert len(config.data.tfrecords_test_dirs
                   ) == 1, "Testing data larger than two :{}".format(
                       config.data.tfrecords_test_dirs)
        test_name = config.data.tfrecords_test_dirs[0].split('/')[-1]
        print("Testing data name: {}".format(test_name))
        # Config metrics

        # float32 eps keeps the group's small-angle branch stable.
        SE3_GROUP = SpecialEuclideanGroup(3, epsilon=np.finfo(np.float32).eps)
        metric = SE3_GROUP.left_canonical_metric

        # Loading Data Configurations
        cnt = 0
        se3_errors = []
        se3_noises = []
        se3_preds = []
        se3_gts = []
        RRs = []
        with inference_server.session as sess:
            sess.run(batch_init_op)
            try:
                while True:
                    _x_dm_batch, _x_cam_batch, _gt_se3param = sess.run([
                        batch_next_op['x_dm'], batch_next_op['x_cam'],
                        batch_next_op['y_se3param']
                    ])
                    # Camera images are normalized to [0, 1] for inference.
                    _x_cam_batch = _x_cam_batch / 255.
                    _y_hat_se3param = inference_server.inference(
                        data=[_x_dm_batch, _x_cam_batch])

                    # Geodesic error of the prediction against ground truth.
                    se3_error = self.cal_metrics(
                        se3_pred=np.squeeze(np.array(_y_hat_se3param), 0),
                        se3_true=np.array(_gt_se3param),
                        group=SE3_GROUP,
                        metric=metric)
                    # Baseline "noise" error: the zero pose vs ground truth.
                    se3_noise = self.cal_metrics(
                        se3_pred=np.zeros_like(np.array(_gt_se3param)),
                        se3_true=np.array(_gt_se3param),
                        group=SE3_GROUP,
                        metric=metric)
                    # RR: error relative to baseline; MRR: 1 - mean RR.
                    RR = np.array(se3_error) / np.array(se3_noise)
                    RRs.append(RR)
                    MRR = 1 - np.mean(np.array(RRs))
                    if cnt == 1:
                        print("MRR Explain:")
                        print("RR = np.array(se3_error) / np.array(se3_noise)")
                        print("np.array(se3_error) = {}".format(
                            np.array(se3_error)))
                        print("np.array(se3_noise) = {}".format(
                            np.array(se3_noise)))
                        print("RR                   = {}".format(RR))
                        print("{} / {} = {}".format(
                            np.array(se3_error)[0],
                            np.array(se3_noise)[0], RR[0]))
                    se3_gts.append(np.array(_gt_se3param))
                    se3_preds.append(np.array(_y_hat_se3param))
                    se3_noises.append(se3_noise)
                    se3_noises_mean = np.mean(np.array(se3_noises))
                    se3_errors.append(se3_error)
                    se3_errors_mean = np.mean(np.array(se3_errors))
                    print("{} ~ {} Test --> {} | {} | {}%".format(
                        cnt * config.train.batch_size,
                        (1 + cnt) * config.train.batch_size, se3_errors_mean,
                        se3_noises_mean, MRR * 100.))
                    cnt += 1

            # End of the one-shot dataset; cleanup: removed unused `as e`.
            except tf.errors.OutOfRangeError:
                print("End of data .")
        print("Final:  {} ({}%)".format(np.round(se3_errors_mean, 4),
                                        np.round(MRR * 100., 4)))

        if not os.path.isdir(res_fp):
            print("{} does not exits, creating one.".format(res_fp))
            pathlib.Path(res_fp).mkdir(parents=True, exist_ok=True)
        np.save(
            '{}/{}_{}_se3_errors.npy'.format(res_fp, test_name, model_name),
            np.array(se3_errors))
        np.save(
            '{}/{}_{}_se3_noises.npy'.format(res_fp, test_name, model_name),
            np.array(se3_noises))
        np.save('{}/{}_{}_se3_gt.npy'.format(res_fp, test_name, model_name),
                np.array(se3_gts))
        np.save('{}/{}_{}_se3_preds.npy'.format(res_fp, test_name, model_name),
                np.array(se3_preds))
Example #14
0
def main(argv):
    """Evaluate a pose-regression network on one iFIND test subject.

    Builds the test input pipeline from the subject's TFRecord file and
    NIfTI volume, restores the latest checkpoint from FLAGS.model_dir,
    and dispatches on FLAGS.loss ('PoseNet', 'AP' or 'SE3'). For each of
    FLAGS.n_iter test images it averages FLAGS.n_samples stochastic
    forward passes, resamples the fixed volume at the predicted pose,
    writes true/predicted slices to imgdump/, and prints PSNR / MSE /
    SSIM / correlation between them.
    """
    # TF Record: one-shot (non-repeating) pipeline over the subject's test set.
    datafiles = FLAGS.data_dir + '/test/' + FLAGS.subject_id + '.tfrecord'
    dataset = tf.data.TFRecordDataset(datafiles)
    dataset = dataset.map(_parse_function_ifind)
    # dataset = dataset.repeat()
    # dataset = dataset.shuffle(FLAGS.queue_buffer)
    dataset = dataset.batch(1)
    image, vec, qt, AP1, AP2, AP3 = dataset.make_one_shot_iterator().get_next()

    # Nifti Volume: the fixed 3-D scan that predicted poses are resampled from.
    subject_path = FLAGS.scan_dir + '/test/' + FLAGS.subject_id + '.nii.gz'
    fixed_image_sitk_tmp = sitk.ReadImage(subject_path, sitk.sitkFloat32)
    # Round-trip through an array to drop the original image's spatial metadata.
    fixed_image_sitk = sitk.GetImageFromArray(
        sitk.GetArrayFromImage(fixed_image_sitk_tmp))
    fixed_image_sitk = sitk.RescaleIntensity(fixed_image_sitk, 0, 1)  # * 255.

    # Network Definition: a feed placeholder is used so the same resized batch
    # can be pushed through the network repeatedly for MC sampling.
    image_input = tf.placeholder(shape=[1, 224, 224, 1], dtype=tf.float32)
    image_resized = tf.image.resize_images(image, size=[224, 224])

    if FLAGS.loss == 'PoseNet':

        # 7 outputs: unit quaternion (4) + translation (3).
        y_pred, _ = inception.inception_v3(image_input, num_classes=7)
        quaternion_pred, translation_pred = tf.split(y_pred, [4, 3], axis=1)

        sess = tf.Session()

        ckpt_file = tf.train.latest_checkpoint(FLAGS.model_dir)
        tf.train.Saver().restore(sess, ckpt_file)
        print('restoring parameters from', ckpt_file)

        SO3_GROUP = SpecialOrthogonalGroup(3)

        for i in range(FLAGS.n_iter):

            _image, _image_resized, _quaternion_true, _translation_true = \
                sess.run([image, image_resized, qt, AP2], )

            # Draw n_samples forward passes for the same input image
            # (stochastic due to dropout/sampling in the network — assumed;
            # confirm against the training graph).
            _quaternion_pred_sample = []
            _translation_pred_sample = []
            for j in range(FLAGS.n_samples):
                _quaternion_pred_i, _translation_pred_i = \
                    sess.run([quaternion_pred, translation_pred],
                             feed_dict={image_input: _image_resized})
                _quaternion_pred_sample.append(_quaternion_pred_i)
                _translation_pred_sample.append(_translation_pred_i)
                print(_quaternion_pred_i, _translation_pred_i)

            # Average rotations intrinsically: quaternions -> rotation vectors,
            # Riemannian mean on SO(3), then back to a quaternion.
            _quaternion_pred_sample = np.vstack(_quaternion_pred_sample)
            _rotvec_pred_sample = SO3_GROUP.rotation_vector_from_quaternion(
                _quaternion_pred_sample)
            _rotvec_pred = SO3_GROUP.left_canonical_metric.mean(
                _rotvec_pred_sample)

            _quaternion_pred = SO3_GROUP.quaternion_from_rotation_vector(
                _rotvec_pred)
            # Translation is Euclidean, so a plain arithmetic mean suffices.
            _translation_pred = np.mean(np.vstack(_translation_pred_sample),
                                        axis=0)

            # _quaternion_pred_variance = SO3_GROUP.left_canonical_metric.variance(_rotvec_pred_sample)
            _translation_pred_variance = np.var(
                np.vstack(_translation_pred_sample), axis=0)

            rx = SO3_GROUP.matrix_from_quaternion(_quaternion_pred)[0]
            # NOTE(review): * 60. presumably rescales the normalized network
            # translation back to scanner units — confirm against training
            # preprocessing.
            tx = _translation_pred[0] * 60.

            image_true = np.squeeze(_image)
            image_pred = resample_sitk(fixed_image_sitk, rx, tx)

            imageio.imsave('imgdump/image_{}_true.png'.format(i), _image[0,
                                                                         ...])
            imageio.imsave('imgdump/image_{}_pred.png'.format(i), image_pred)

            # Image-similarity metrics between the resampled prediction and
            # the ground-truth slice (helpers print their own results).
            calc_psnr(image_pred, image_true)
            calc_mse(image_pred, image_true)
            calc_ssim(image_pred, image_true)
            calc_correlation(image_pred, image_true)

    elif FLAGS.loss == 'AP':

        # 9 outputs: three 3-D anchor points defining the plane.
        y_pred, _ = inception.inception_v3(image_input, num_classes=9)
        AP1_pred, AP2_pred, AP3_pred = tf.split(y_pred, 3, axis=1)

        sess = tf.Session()

        ckpt_file = tf.train.latest_checkpoint(FLAGS.model_dir)
        tf.train.Saver().restore(sess, ckpt_file)
        print('restoring parameters from', ckpt_file)

        for i in range(FLAGS.n_iter):

            _image, _image_resized, _AP1, _AP2, _AP3 = \
                sess.run([image, image_resized, AP1, AP2, AP3])

            # MC-sample each anchor point, then average per point.
            _AP1_sample = []
            _AP2_sample = []
            _AP3_sample = []
            for j in range(FLAGS.n_samples):
                _AP1_pred_i, _AP2_pred_i, _AP3_pred_i = \
                    sess.run([AP1_pred, AP2_pred, AP3_pred],
                             feed_dict={image_input: _image_resized})
                _AP1_sample.append(_AP1_pred_i)
                _AP2_sample.append(_AP2_pred_i)
                _AP3_sample.append(_AP3_pred_i)

            _AP1_pred = np.mean(np.vstack(_AP1_sample), axis=0)
            _AP2_pred = np.mean(np.vstack(_AP2_sample), axis=0)
            _AP3_pred = np.mean(np.vstack(_AP3_sample), axis=0)

            _AP1_pred_variance = np.var(np.vstack(_AP1_sample), axis=0)
            _AP2_pred_variance = np.var(np.vstack(_AP2_sample), axis=0)
            _AP3_pred_variance = np.var(np.vstack(_AP3_sample), axis=0)

            # Per-point Euclidean errors (computed but currently unused below).
            dist_ap1 = np.linalg.norm(_AP1 - _AP1_pred)
            dist_ap2 = np.linalg.norm(_AP2 - _AP2_pred)
            dist_ap3 = np.linalg.norm(_AP3 - _AP3_pred)

            # Recover the rigid pose from the three predicted anchor points;
            # AP2 acts as the translation origin.
            rx = matrix_from_anchor_points(_AP1_pred[0], _AP2_pred[0],
                                           _AP3_pred[0])
            tx = _AP2_pred[0] * 60.

            image_true = np.squeeze(_image)
            image_pred = resample_sitk(fixed_image_sitk, rx, tx)

            imageio.imsave('imgdump/image_{}_true.png'.format(i), _image[0,
                                                                         ...])
            imageio.imsave('imgdump/image_{}_pred.png'.format(i), image_pred)

            calc_psnr(image_pred, image_true)
            calc_mse(image_pred, image_true)
            calc_ssim(image_pred, image_true)
            calc_correlation(image_pred, image_true)

    elif FLAGS.loss == 'SE3':

        # 6 outputs: an se(3) tangent vector (rotation vector + translation).
        y_pred, _ = inception.inception_v3(image_input, num_classes=6)

        sess = tf.Session()

        ckpt_file = tf.train.latest_checkpoint(FLAGS.model_dir)
        tf.train.Saver().restore(sess, ckpt_file)
        print('restoring parameters from', ckpt_file)

        SO3_GROUP = SpecialOrthogonalGroup(3)
        SE3_GROUP = SpecialEuclideanGroup(3)

        for i in range(FLAGS.n_iter):

            print(i)

            _image, _image_resized, _rvec, _tvec = \
                sess.run([image, image_resized, vec, AP2])

            _y_pred_sample = []
            for j in range(FLAGS.n_samples):
                _y_pred_i = sess.run([y_pred],
                                     feed_dict={image_input: _image_resized})
                _y_pred_sample.append(_y_pred_i[0])

            # Riemannian mean/variance of the sampled se(3) predictions.
            _y_pred_sample = np.vstack(_y_pred_sample)
            _y_pred = SE3_GROUP.left_canonical_metric.mean(_y_pred_sample)
            _y_pred_variance = SE3_GROUP.left_canonical_metric.variance(
                _y_pred_sample)

            # First 3 components: rotation vector; last 3: translation.
            rx = SO3_GROUP.matrix_from_rotation_vector(_y_pred[0, :3])[0]
            tx = _y_pred[0, 3:] * 60.

            image_true = np.squeeze(_image)
            image_pred = resample_sitk(fixed_image_sitk, rx, tx)

            imageio.imsave('imgdump/image_{}_true.png'.format(i), _image[0,
                                                                         ...])
            imageio.imsave('imgdump/image_{}_pred.png'.format(i), image_pred)

            calc_psnr(image_pred, image_true)
            calc_mse(image_pred, image_true)
            calc_ssim(image_pred, image_true)
            calc_correlation(image_pred, image_true)

    else:
        print('Invalid Option:', FLAGS.loss)
        raise SystemExit
Пример #15
0
import os
import yaml
import numpy as np
from box import Box
import geomstats.lie_group as lie_group
from geomstats.special_euclidean_group import SpecialEuclideanGroup
import liegroups

# Module-level SE(3) group shared by the helpers below; epsilon at float32
# machine precision guards near-zero rotation angles in the group operations.
SE3_GROUP = SpecialEuclideanGroup(3, epsilon=np.finfo(np.float32).eps)
# Left-invariant canonical metric on SE(3), used by cal_se3_error.
metric = SE3_GROUP.left_canonical_metric


def load_config(config_fp):
    """Load a YAML configuration file into a Box for attribute-style access.

    Parameters
    ----------
    config_fp : str
        Path to the YAML file, relative to this module's directory.

    Returns
    -------
    Box
        The parsed configuration.
    """
    config_abs_fp = os.path.join(os.path.dirname(__file__), config_fp)
    # Context manager closes the handle (the original leaked it), and
    # safe_load replaces yaml.load, which is unsafe on untrusted input and
    # requires an explicit Loader since PyYAML 6.
    with open(config_abs_fp, 'r') as f:
        config = Box(yaml.safe_load(f))
    return config


def cal_se3_error(se3_pred, se3_true):
    """Return the geodesic loss between predicted and true se(3) vectors.

    Thin wrapper over geomstats' group loss, using the module-level SE(3)
    group and its left-invariant canonical metric.
    """
    return lie_group.loss(se3_pred, se3_true, SE3_GROUP, metric)


def SE3_to_se3(SE3_matrix):
    """Log-map a 4x4 SE(3) matrix to a 6-vector in this project's ordering.

    The liegroups library lays out the tangent vector as
    [translation (3), rotation (3)], whereas this project expects
    [rotation (3), translation (3)], so the two halves are swapped.
    """
    # This liegroups lib represent se3 with first 3 element as translation, which is different than us
    tangent = liegroups.SE3.log(
        liegroups.SE3.from_matrix(SE3_matrix, normalize=True))
    # Swap halves: [t, r] -> [r, t].
    return np.concatenate((tangent[3:], tangent[:3]))
Пример #16
0
def main(argv):
    """Evaluate a pose-regression network on the KingsCollege test set.

    Streams the whole test TFRecord once, restores the latest checkpoint
    from FLAGS.model_dir, and dispatches on FLAGS.loss ('PoseNet' or
    'SE3'). Per-sample translation and angular errors are printed and
    their medians reported at the end; the SE3 branch additionally
    accumulates group-composition errors and prints inverse-covariance
    weights derived from them.
    """
    # TF Record: one pass over the test set (no repeat/shuffle), batch of 1.
    dataset = tf.data.TFRecordDataset(FLAGS.data_dir +
                                      '/dataset_test.tfrecords')
    dataset = dataset.map(_parse_function_kingscollege)
    # dataset = dataset.repeat()
    # dataset = dataset.shuffle(FLAGS.queue_buffer)
    dataset = dataset.batch(1)
    image, vec, pose_q, pose_x = dataset.make_one_shot_iterator().get_next()

    # Network Definition
    image_resized = tf.image.resize_images(image, size=[224, 224])

    if FLAGS.loss == 'PoseNet':

        # 7 outputs: unit quaternion (4) + translation (3).
        y_pred, _ = inception.inception_v3(image_resized,
                                           num_classes=7,
                                           is_training=False)
        quaternion_pred, translation_pred = tf.split(y_pred, [4, 3], axis=1)

        sess = tf.Session()

        ckpt_file = tf.train.latest_checkpoint(FLAGS.model_dir)
        tf.train.Saver().restore(sess, ckpt_file)
        print('restoring parameters from', ckpt_file)

        i = 0

        results = []

        try:

            # Iterate until the one-shot iterator is exhausted.
            while True:
                _image, _quaternion_true, _translation_true, _quaternion_pred, _translation_pred = \
                    sess.run([image, pose_q, pose_x, quaternion_pred, translation_pred])

                # Compute Individual Sample Error: angle between unit
                # quaternions via their dot product (abs() because q and -q
                # represent the same rotation), converted to degrees.
                q1 = _quaternion_true / np.linalg.norm(_quaternion_true)
                q2 = _quaternion_pred / np.linalg.norm(_quaternion_pred)
                d = abs(np.sum(np.multiply(q1, q2)))
                theta = 2. * np.arccos(d) * 180. / np.pi
                error_x = np.linalg.norm(_translation_true - _translation_pred)

                results.append([error_x, theta])

                print('Iteration:', i, 'Error XYZ (m):', error_x,
                      'Error Q (degrees):', theta)
                i = i + 1

        except tf.errors.OutOfRangeError:
            print('End of Test Data')

        # Report the median errors, as is conventional for PoseNet evaluation.
        results = np.stack(results)
        results = np.median(results, axis=0)
        print('Error XYZ (m):', results[0], 'Error Q (degrees):', results[1])

    elif FLAGS.loss == 'SE3':

        # 6 outputs: an se(3) tangent vector (rotation vector + translation).
        y_pred, _ = inception.inception_v3(image_resized,
                                           num_classes=6,
                                           is_training=False)

        sess = tf.Session()

        ckpt_file = tf.train.latest_checkpoint(FLAGS.model_dir)
        tf.train.Saver().restore(sess, ckpt_file)
        print('restoring parameters from', ckpt_file)

        SO3_GROUP = SpecialOrthogonalGroup(3)
        SE3_GROUP = SpecialEuclideanGroup(3)
        # Left-invariant metric with identity inner product at the identity.
        metric = InvariantMetric(group=SE3_GROUP,
                                 inner_product_mat_at_identity=np.eye(6),
                                 left_or_right='left')

        i = 0

        results = []
        _y_pred_i = []
        _y_true_i = []
        _se3_err_i = []

        try:

            while True:
                _image, _rvec, _qvec, _tvec, _y_pred = \
                    sess.run([image, vec, pose_q, pose_x, y_pred])

                # Convert the predicted rotation vector to a quaternion so the
                # angular error is comparable with the PoseNet branch.
                _quaternion_true = _qvec
                _quaternion_pred = SO3_GROUP.quaternion_from_rotation_vector(
                    _y_pred[0, :3])[0]

                # Compute Individual Sample Error (same quaternion-angle
                # formula as the PoseNet branch).
                q1 = _quaternion_true / np.linalg.norm(_quaternion_true)
                q2 = _quaternion_pred / np.linalg.norm(_quaternion_pred)
                d = abs(np.sum(np.multiply(q1, q2)))
                theta = 2. * np.arccos(d) * 180. / np.pi
                error_x = np.linalg.norm(_tvec - _y_pred[0, 3:])
                results.append([error_x, theta])

                # SE3 compute: squared geodesic distance between predicted and
                # true poses, plus the group-relative error inverse(true)*pred.
                _y_true = np.concatenate((_rvec, _tvec), axis=-1)
                se3_dist = metric.squared_dist(_y_pred, _y_true)[0]

                _y_pred_i.append(_y_pred)
                _y_true_i.append(_y_true)
                _se3_err_i.append(
                    SE3_GROUP.compose(SE3_GROUP.inverse(_y_true), _y_pred))

                print('Iteration:', i, 'Error XYZ (m):', error_x,
                      'Error Q (degrees):', theta, 'SE3 dist:', se3_dist)
                i = i + 1

        except tf.errors.OutOfRangeError:
            print('End of Test Data')

        # Calculate SE3 Error Weights: per-component weights proportional to
        # the inverse covariance of the accumulated group errors, normalized
        # to unit norm (presumably for reweighting the SE3 loss — confirm
        # against the training code).
        err_vec = np.vstack(_se3_err_i)
        err_weights = np.diag(np.linalg.inv(np.cov(err_vec.T)))
        err_weights = err_weights / np.linalg.norm(err_weights)
        print(err_weights)
        results = np.stack(results)
        results = np.median(results, axis=0)
        print('Error XYZ (m):', results[0], 'Error Q (degrees):', results[1])

    else:
        print('Invalid Option:', FLAGS.loss)
        raise SystemExit