Example #1
 def processData(data):
     if isinstance(data, Quaternion):
         return data
     elif isinstance(data, list) and len(data) == 4:
         return Quaternion(data)
     else:
         return Quaternion()
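A minimal usage sketch (assuming pyrr's Quaternion is in scope, as in the other examples on this page):

q1 = processData(Quaternion())          # passed through unchanged
q2 = processData([0.0, 0.0, 0.0, 1.0])  # a 4-element list is wrapped into a Quaternion
q3 = processData("anything else")       # everything else falls back to the default Quaternion()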
Example #2
    async def nut(self):
        my_qq = np.array([1, 0, 0, 0], dtype=np.float64)
        my_off = Quaternion()
        my_off2 = Quaternion.from_x_rotation(np.pi/2)*Quaternion.from_y_rotation(np.pi/2)

        mig = ahrs.filters.Madgwick(frequency=100.0)
        gg = []
        while self.coro_keep_alive['nut'].is_alive:
            try:
                if self.last_read:
                    gg = u.get_numbers_from_text(self.last_read.decode().strip('\r'), ',')
                    self.last_read = b''

                if len(gg) == 9 and len(self.poses) > 3:
                    # my_qq = mig.updateIMU(my_qq, gg[3:6], gg[6:])
                    my_qq = mig.updateMARG(my_qq, gg[3:6], gg[6:], gg[:3])
                    # print (my_qq)
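                    # ahrs returns quaternions as [w, x, y, z]; reorder into pyrr's [x, y, z, w] before applying the offsets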
                    temp = Quaternion(my_off*Quaternion([*my_qq[1:], my_qq[0]])*my_off2).normalised
                    self.poses[3].r_w = temp[3]
                    self.poses[3].r_y = temp[0]
                    self.poses[3].r_z = temp[1]
                    self.poses[3].r_x = temp[2]

                    if self._serialResetYaw:
                        my_off = (Quaternion([*my_qq[1:], my_qq[0]])*my_off2).inverse.normalised
                        mig = ahrs.filters.Madgwick(frequency=100.0)

            except Exception as e:
                print (f'nut: dead, reason: {e}')
                # break

            await asyncio.sleep(self.coro_keep_alive['nut'].sleep_delay)
Example #3
async def serial_listener():
    try:
        irl_rot_off = Quaternion.from_x_rotation(
            0
        )  # supply your own IMU offset here; it has to be a Quaternion object

        with serial.Serial(SERIAL_PORT, SERIAL_BAUD, timeout=1 / 4) as ser:
            with serial.threaded.ReaderThread(
                    ser, u.SerialReaderFactory) as protocol:
                protocol.write_line("nut")
                await asyncio.sleep(5)

                print('serial_listener: starting tracking thread')

                while poser.coro_keep_alive["serial_listener"].is_alive:
                    gg = u.get_numbers_from_text(protocol.last_read, ",")

                    if len(gg) >= 4:
                        w, x, y, z, *_ = gg
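                        # remap the IMU axes (with sign flips) into the [x, y, z, w] layout pyrr expects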
                        my_q = Quaternion([-y, z, -x, w])

                        my_q = Quaternion(my_q * irl_rot_off).normalised
                        poser.poses[0].r_w = round(my_q[3], 5)
                        poser.poses[0].r_x = round(my_q[0], 5)
                        poser.poses[0].r_y = round(my_q[1], 5)
                        poser.poses[0].r_z = round(my_q[2], 5)

                    await asyncio.sleep(
                        poser.coro_keep_alive["serial_listener"].sleep_delay)

    except Exception as e:
        print(f'serial_listener: failed {e}')

    poser.coro_keep_alive["serial_listener"].is_alive = False
Example #4
 def test_euler_to_quaternion_1(self):
     v = np.radians(Vector3([0, 0, 0]))
     self.assertQuaternionAreEqual(euler_angles_to_quaternion(v), (Quaternion([0, 0, 0, 1])))
     v = np.radians([0, 0, 180])
     self.assertQuaternionAreEqual(euler_angles_to_quaternion(v), (Quaternion([0, 0, 1, 0])))
     v = np.radians([0, 180, 0])
     self.assertQuaternionAreEqual(euler_angles_to_quaternion(v), (Quaternion([0, 1, 0, 0])))
     v = np.radians([180, 0, 0])
     self.assertQuaternionAreEqual(euler_angles_to_quaternion(v), (Quaternion([1, 0, 0, 0])))
Example #5
File: IMU.py Project: sahasam/hobo_vr
def from_to_quaternion(v1, v2, is_unit=True, epsilon=1e-5):
    # https://stackoverflow.com/a/11741520/782170
    if not is_unit:
        v1 = unit_vector(v1)
        v2 = unit_vector(v2)

    k_cos_theta = np.dot(v1, v2)
    k = np.sqrt(np.dot(v1, v1) * np.dot(v2, v2))

    if abs(k_cos_theta / k + 1) < epsilon:
        # 180 degree rotation around any orthogonal vector
        # embed the orthogonal axis as a pure quaternion ([x, y, z, w] with w = 0)
        return Quaternion([*unit_vector(perpendicular_vector(v1)), 0.0])

    # pyrr stores quaternions as [x, y, z, w], so the scalar part goes last
    q = Quaternion([*np.cross(v1, v2), k_cos_theta + k])
    return q.normalised
Example #6
    def test_oo_examples(self):
        from pyrr import Quaternion, Matrix44, Vector3
        import numpy as np

        point = Vector3([1., 2., 3.])
        orientation = Quaternion()
        translation = Vector3()
        scale = Vector3([1., 1., 1.])

        # translate along X by 1
        translation += [1.0, 0.0, 0.0]

        # rotate about Y by pi/2
        rotation = Quaternion.from_y_rotation(np.pi / 2.0)
        orientation = rotation * orientation

        # create a matrix
        # start our matrix off using the scale
        matrix = Matrix44.from_scale(scale)

        # apply our orientation
        # we can multiply matrices and quaternions directly!
        matrix = matrix * orientation

        # apply our translation
        translation = Matrix44.from_translation(translation)
        matrix = matrix * translation

        # transform our point by the matrix
        # vectors are transformable by matrices and quaternions directly
        point = matrix * point
Example #7
def euler_angles_to_quaternion(eulers, dtype=None):
    """Creates a quaternion from a set of Euler angles.

    Rotation order for euler angles is YZX.

    See: http://www.euclideanspace.com/maths/geometry/rotations/conversions/eulerToQuaternion/index.htm

    """
    dtype = dtype or eulers.dtype

    heading_half = eulers[1] / 2.
    attitude_half = eulers[2] / 2.
    bank_half = eulers[0] / 2.

    c1 = np.math.cos(heading_half)
    s1 = np.math.sin(heading_half)
    c2 = np.math.cos(attitude_half)
    s2 = np.math.sin(attitude_half)
    c3 = np.math.cos(bank_half)
    s3 = np.math.sin(bank_half)
    c1c2 = c1 * c2
    s1s2 = s1 * s2
    return Quaternion(
        [
            c1c2 * s3 + s1s2 * c3,
            s1 * c2 * c3 + c1 * s2 * s3,
            c1 * s2 * c3 - s1 * c2 * s3,
            c1c2 * c3 - s1s2 * s3,
        ],
        dtype=dtype
    )
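A quick sanity check for the converter above (a minimal sketch, assuming NumPy and pyrr are imported as in the other examples); it mirrors the 180-degree bank case asserted in Example #4:

import numpy as np
from pyrr import Quaternion

v = np.radians([180, 0, 0])   # 180 degrees of bank (X)
q = euler_angles_to_quaternion(v)
assert np.allclose(q, Quaternion([1, 0, 0, 0]))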
Example #8
    async def serial_listener3(self):
        """Get orientation data from serial."""
        irl_rot_off = Quaternion.from_x_rotation(
            np.pi / 3)  # the IMU on this controller is physically mounted at an angle, so apply a fixed offset

        my_off = Quaternion()

        if not check_serial_dict(self.serialPaths, 'cont_r'):
            print(
                'failed to init serial_listener3: bad serial dict for key \'cont_r\''
            )
            self.coro_keep_alive["serial_listener3"][0] = False
            return

        with serial.Serial(self.serialPaths["cont_r"], 115200,
                           timeout=1 / 5) as ser2:
            with serial.threaded.ReaderThread(
                    ser2, u.SerialReaderFactory) as protocol:
                protocol.write_line("nut")
                await asyncio.sleep(5)

                while self.coro_keep_alive["serial_listener3"][0]:
                    try:
                        gg = u.get_numbers_from_text(protocol.last_read, ',')

                        if len(gg) >= 4:
                            w, x, y, z, *_ = gg
                            my_q = Quaternion([-y, z, -x, w])

                            if self._serialResetYaw:
                                my_off = Quaternion([0, z, 0,
                                                     w]).inverse.normalised

                            my_q = my_off * my_q * irl_rot_off
                            self.pose_controller_r.r_w = round(my_q[3], 5)
                            self.pose_controller_r.r_x = round(my_q[0], 5)
                            self.pose_controller_r.r_y = round(my_q[1], 5)
                            self.pose_controller_r.r_z = round(my_q[2], 5)

                        await asyncio.sleep(
                            self.coro_keep_alive["serial_listener3"][1])

                    except Exception as e:
                        print(f"{self.serial_listener.__name__}: {e}")
                        break
        self.coro_keep_alive["serial_listener3"][0] = False
Example #9
    def __compute_bbox_coords(self, mesh, view):
        upright = Quaternion.from_y_rotation(np.pi / 2)
        rotation = Quaternion(mesh['rotation'])
        center = mesh['translation']
        world_corners = {
            'top_left':
            Vector3([center[0], center[1], center[2]]) +
            (rotation * upright *
             Vector3([0, mesh['width'] / 2, mesh['height'] / 2])),
            'top_right':
            Vector3([center[0], center[1], center[2]]) +
            (rotation * upright *
             Vector3([0, -mesh['width'] / 2, mesh['height'] / 2])),
            'bottom_right':
            Vector3([center[0], center[1], center[2]]) +
            (rotation * upright *
             Vector3([0, -mesh['width'] / 2, -mesh['height'] / 2])),
            'bottom_left':
            Vector3([center[0], center[1], center[2]]) +
            (rotation * upright *
             Vector3([0, mesh['width'] / 2, -mesh['height'] / 2]))
        }

        hidden_corners = 0
        left = right = top = bottom = None

        for key, value in world_corners.items():
            img_coords = self.__project_to_img_frame(value, view)
            if (img_coords[0] < 10 or img_coords[0] > (self.width - 10)
                    or img_coords[1] < 10 or img_coords[1] >
                (self.height - 10)):
                hidden_corners += 1
            if left is None or (img_coords[0] < left['x']):
                left = {'x': img_coords[0], 'y': img_coords[1]}
            if top is None or (img_coords[1] < top['y']):
                top = {'x': img_coords[0], 'y': img_coords[1]}
            if bottom is None or (img_coords[1] > bottom['y']):
                bottom = {'x': img_coords[0], 'y': img_coords[1]}
            if right is None or (img_coords[0] > right['x']):
                right = {'x': img_coords[0], 'y': img_coords[1]}

        image_corners = {
            'min': [int(left['x']), int(top['y'])],
            'max': [int(right['x']), int(bottom['y'])]
        }

        if hidden_corners > 3:
            return {}
        elif hidden_corners > 0:
            for key, img_coords in image_corners.items():
                for i in range(0, 2):
                    if img_coords[i] < 0:
                        img_coords[i] = 0
                    elif img_coords[i] > (self.width
                                          if i == 0 else self.height):
                        img_coords[i] = self.width if i == 0 else self.height
                image_corners[key] = img_coords

        return image_corners
Example #10
        def buildBoneMatrices(tree, parentmat):
            trans = tree['transform']
            translate = Matrix44.from_translation(trans['position'])
            rotate = Quaternion(trans['rotation']).matrix44
            mat = mats[tree['bone']] = rotate * translate * parentmat

            for x in tree['children']:
                buildBoneMatrices(x, mat)
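A hypothetical call site for the recursive builder above (a sketch only: mats is assumed to be a dict in the enclosing scope, and the skeleton dict shape is inferred from the keys the function reads):

from pyrr import Matrix44

mats = {}
skeleton = {
    'bone': 'root',
    'transform': {'position': [0.0, 0.0, 0.0], 'rotation': [0.0, 0.0, 0.0, 1.0]},
    'children': [],
}
buildBoneMatrices(skeleton, Matrix44.identity())
# mats['root'] now holds the root bone's matrix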
Example #11
    def test_conversions(self):
        from pyrr import Quaternion, Matrix33, Matrix44, Vector3, Vector4

        v3 = Vector3([1., 0., 0.])
        v4 = Vector4.from_vector3(v3, w=1.0)
        v3, w = Vector3.from_vector4(v4)

        m44 = Matrix44()
        q = Quaternion(m44)
        m33 = Matrix33(q)

        m33 = Matrix44().matrix33
        m44 = Matrix33().matrix44
        q = Matrix44().quaternion
        q = Matrix33().quaternion

        m33 = Quaternion().matrix33
        m44 = Quaternion().matrix44
Example #12
 def __init__(self):
     self.location = Vector3()
     self.scale = Vector3([1, 1, 1])
     self.rotation = Rotator([0.0, 0.0, 0.0])
     self.quaternion = Quaternion([0, 0, 0, 1])
     self.initial_matrix = Matrix44.identity()
     
     self.transform_matrix = Matrix44.identity()
     # Flag indicating whether the transformation has been modified
     self.is_changed = False
Example #13
 def setData(self, data):
     if isinstance(data, Quaternion):
         self._data = data
     elif isinstance(data, list) and len(data) == 4:
         # here serialized data will be handled
         # when a node deserializes itself, it creates all pins
         # and then sets data to them; here, data will be set for the first time after deserialization
         self._data = Quaternion(data)
     else:
         self._data = self.defaultValue()
     PinWidgetBase.setData(self, self._data)
Example #14
    def test_quaternion_matrix_conversion(self):
        # https://au.mathworks.com/help/robotics/ref/quat2rotm.html?requestedDomain=www.mathworks.com
        q = Quaternion([0.7071, 0., 0., 0.7071])
        m33 = q.matrix33
        expected = np.array([
            [1., 0., 0.],
            [0., -0., -1.],
            [0., 1., -0.],
        ])
        self.assertTrue(np.allclose(m33, expected))

        # issue #42
        q = Quaternion([0.80087974, 0.03166748, 0.59114721, -0.09018753])
        m33 = q.matrix33
        q2 = Quaternion.from_matrix(m33)
        print(q, q2)
        self.assertTrue(np.allclose(q, q2))

        q3 = Quaternion.from_matrix(m33.T)
        self.assertFalse(np.allclose(q2, q3))
Example #15
    def __init__(self, name: str = "New Node"):
        """  Node element of scene graph (tree structure).

        Args:
            name: Name for string representation.
        """
        self.children = list()
        self._parent = None
        self.name = name

        self._local_position = Vector3()
        self._world_position = Vector3()

        self._scale = Vector3([1., 1., 1.])

        self._local_quaternion = Quaternion()
        self._world_quaternion = Quaternion()
        self._world_matrix = Matrix44.identity()
        self._local_matrix = Matrix44.identity()

        self.__matrix_needs_update = True
Example #16
    def parse_annotations(self, path: str):
        if not os.path.isfile(path):
            raise Exception("Annotations file not found")
        annotations = dict()
        with open(path) as file:
            file.readline()  # Discard the header
            for line in file:
                items = line.split(',')
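                # columns 1-3 of each row hold the position, columns 4-7 the rotation quaternion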
                annotations[items[0].strip()] = BackgroundAnnotations(
                    Vector3([float(x) for x in items[1:4]]),
                    Quaternion([float(x) for x in items[4:8]]))

        return annotations
Example #17
File: dicy.py Project: encukou/mufl
    def bounce(self, dist, norm):
        if dist > self.r:
            return False
        norm = numpy.array(norm)

        rot = self.rotation
        rot_mat = rot.matrix33
        low_pt = None
        for xp in -1, 1:
            for yp in -1, 1:
                for zp in -1, 1:
                    point = rot_mat * Vector3([xp, yp, zp])
                    if low_pt is None or point[2] < low_pt[2]:
                        low_pt = point
        if low_pt[2] >= dist:
            return False
        #play_sound('die-hit')
        self.locked = False
        rot_imp = Vector3(norm).cross(Vector3(
            (*low_pt.xy, 0.0))) * (.5 + hypot(*self.speed) / 10)
        riq = Quaternion.from_axis(rot_imp)
        ump = -Vector3(self.speed).cross(norm)
        umpq = Quaternion.from_axis(ump)
        if np.isnan(umpq).any():
            umpq = Quaternion()
        self.rotation_speed = Quaternion().lerp(
            umpq * riq * self.rotation_speed, 0.7)
        self.speed *= DAMPING_BOUNCE

        # Reflect speed
        if linalg_norm(norm) >= 2:
            1 / 0
        s = self.speed
        self.speed = s - 2 * s.dot(norm) * norm

        self.pos += (self.r - dist) * norm
        return True
Example #18
    def test_operators(self):
        from pyrr import Quaternion, Matrix44, Matrix33, Vector3, Vector4
        import numpy as np

        # matrix multiplication
        m = Matrix44() * Matrix33()
        m = Matrix44() * Quaternion()
        m = Matrix33() * Quaternion()

        # matrix inverse
        m = ~Matrix44.from_x_rotation(np.pi)

        # quaternion multiplication
        q = Quaternion() * Quaternion()
        q = Quaternion() * Matrix44()
        q = Quaternion() * Matrix33()

        # quaternion inverse (conjugate)
        q = ~Quaternion()

        # quaternion dot product
        d = Quaternion() | Quaternion()

        # vector operations
        v = Vector3() + Vector3()
        v = Vector4() - Vector4()

        # vector transform
        v = Quaternion() * Vector3()
        v = Matrix44() * Vector3()
        v = Matrix44() * Vector4()
        v = Matrix33() * Vector3()

        # dot and cross products
        dot = Vector3() | Vector3()
        cross = Vector3() ^ Vector3()
Example #19
    def update_transform_matrix(self):
        scale_matrix = Matrix44.from_scale(self.scale)
        translation_matrix = Matrix44.from_translation(self.location)
        
        # TODO: For some reason the qx, qy, qz part of the quaternion must be flipped
        # Need to understand why and fix it
        # The change need to be made together with the coordinate conversion in NDDS
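        # Note: negating (qx, qy, qz) of a unit quaternion gives its conjugate, i.e. the inverse
        # rotation, so the flip below is equivalent to inverting the incoming rotation.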

        # rotation_matrix = Matrix44.from_quaternion(self.quaternion)
        qx, qy, qz, qw = self.quaternion
        test_quaternion = Quaternion([-qx, -qy, -qz, qw])
        rotation_matrix = Matrix44.from_quaternion(test_quaternion)

        # print('update_transform_matrix: rotation_matrix = {}'.format(rotation_matrix))

        relative_matrix = (translation_matrix * scale_matrix * rotation_matrix)
        # self.transform_matrix =  relative_matrix * self.initial_matrix
        self.transform_matrix = relative_matrix
Example #20
 def __generate(self, index):
     baseImage = self.base_dataset.get()
     annotations = []
     for gate in self.gates_config['gates']:
         view = self.__compute_view_matrix(baseImage.annotations)
         coords = self.__compute_bbox_coords(gate, view)
         if coords != {}:
             distance = self.__compute_camera_proximity(
                 gate, baseImage.annotations)
             camera_yaw = self.__euler_yaw(
                 baseImage.annotations.orientation)
             gate_yaw = self.__euler_yaw(Quaternion(gate['rotation']))
             rotation = camera_yaw - gate_yaw
             bbox = {
                 'class_id': 1,
                 'min': [coords['min'][0], coords['min'][1]],
                 'max': [coords['max'][0], coords['max'][1]],
                 'distance': distance,
                 'rotation': rotation
             }
             annotations.append(bbox)
     self.annotated_dataset.put(
         AnnotatedImage(baseImage.image_path(), index, annotations))
Example #21
    def convert_tf_to_transform(self, transform_stamped):
        position_vector = Vector4()
        position_vector.x = transform_stamped.transform.translation.x
        position_vector.y = transform_stamped.transform.translation.y
        position_vector.z = transform_stamped.transform.translation.z
        position_vector.w = 0.

        rotation_quaterniond = Quaternion()
        rotation_quaterniond.w = transform_stamped.transform.rotation.w
        rotation_quaterniond.x = transform_stamped.transform.rotation.x
        rotation_quaterniond.y = transform_stamped.transform.rotation.y
        rotation_quaterniond.z = transform_stamped.transform.rotation.z

        scale = Vector4([1., 1., 1., 1.])

        affine3d = Affine3d(position_vector, rotation_quaterniond, scale)

        transform = Transform(affine3d,
                              transform_stamped.header.frame_id,
                              transform_stamped.child_frame_id,
                              transform_stamped.header.stamp.to_time(),
                              authority="")
        return transform
Example #22
 def test_rotate_z_1(self):
     self.root.rotate_z(180)
     self.assertQuaternionAreEqual(self.root.local_quaternion,
                                   Quaternion([0, 0, 1, 0]))
Example #23
File: IMU.py Project: sahasam/hobo_vr
    def get_orientation(self, convergence_acc: float, overshoot_acc: float, convergence_mag: float,
                        overshoot_mag: float):
        # https://www.sciencedirect.com/science/article/pii/S2405896317321201
        if self.prev_up is not None:
            t2 = time.time()

            k_acc, k_bias_acc = get_ki_kbias(convergence_acc, overshoot_acc, t2 - self.prev_orientation_time)
            k_mag, k_bias_mag = get_ki_kbias(convergence_mag, overshoot_mag, t2 - self.prev_orientation_time)

            grav = np.asarray(unit_vector(self.get_grav()))
            mag = unit_vector(np.asarray(self.get_mag()))
            east = unit_vector(np.cross(mag, grav))
            west = -east
            north = -unit_vector(np.cross(east, grav))
            up = -unit_vector(np.cross(north, east))

            gyro = -self.get_gyro()
            if self.prev_bias is not None:
                if np.isnan(self.prev_bias).any():
                    self.prev_bias = None
                else:
                    gyro += self.prev_bias

            gy = Quaternion.from_eulers(gyro * (t2 - self.prev_orientation_time))
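            # rotate the stored reference directions by the integrated gyro step: each vector is
            # embedded as a pure quaternion ([x, y, z, 0]) and conjugated with gy (~gy is its inverse)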
            pred_west = (~gy * Quaternion(np.pad(self.prev_west, (0,1), 'constant', constant_values=0)) * gy)
            pred_up = (~gy * Quaternion(np.pad(self.prev_up, (0,1), 'constant', constant_values=0)) * gy)
            pred_north = (~gy * Quaternion(np.pad(self.prev_north, (0,1), 'constant', constant_values=0)) * gy)

            pred_north_v = np.asarray(pred_north.xyz)
            pred_west_v = np.asarray(pred_west.xyz)
            pred_up_v = np.asarray(pred_up.xyz)

            fix_north = Quaternion.from_axis(np.cross(north, pred_north_v) * k_mag)
            fix_west = Quaternion.from_axis(np.cross(west, pred_west_v) * k_mag)
            fix_up = Quaternion.from_axis(np.cross(up, pred_up_v) * k_acc)

            x_acc = np.cross(up, pred_up_v)
            x_mag = np.cross(north, pred_north_v)
            pb0 = k_bias_acc*x_acc + k_bias_mag*x_mag
            if self.prev_bias is not None:
                self.prev_bias = self.prev_bias * (t2 - self.prev_orientation_time) + pb0
            else:
                self.prev_bias = pb0

            true_north = unit_vector((pred_north * fix_north).xyz)
            true_up = unit_vector((pred_up * fix_up).xyz)
            true_west = unit_vector((pred_west * fix_west).xyz)

            rot = np.asarray([true_north, true_west, true_up]).transpose()
            q = Quaternion.from_matrix(rot)

            if not np.isnan(north).any():
                self.prev_north = north
            if not np.isnan(west).any():
                self.prev_west = west
            if not np.isnan(up).any():
                self.prev_up = up

            self.prev_orientation_time = t2

            return q
        else:
            grav = np.asarray(unit_vector(self.get_grav()))
            mag = unit_vector(np.asarray(self.get_mag()))
            east = unit_vector(np.cross(mag, grav))
            north = -unit_vector(np.cross(east, grav))
            up = -unit_vector(np.cross(north, east))

            rot = np.asarray([north, -east, up]).transpose()
            q = Quaternion.from_matrix(rot)
            if (not np.isnan(grav).any()) and (not np.isnan(mag).any()):
                self.prev_west = -east
                self.prev_north = north
                self.prev_up = up
                self.prev_orientation_time = time.time()
            return q
Example #24
 def reset_transform(self):
     self.set_location(Vector3())
     # self.set_rotation(euler.create(0.0, 0.0, 0.0))
     self.set_quaternion(Quaternion([0, 0, 0, 1]))
     self.mark_changed()
Example #25
 def test_euler_1(self):
     self.root.local_euler_angles = Vector3([180, 0, 0])
     self.assertQuaternionAreEqual(self.root.local_quaternion,
                                   Quaternion([1, 0, 0, 0]))
Example #26
 def test_rotate_z_4(self):
     self.root.rotate_z(180, local_space=False)
     self.assertQuaternionAreEqual(self.root.local_quaternion,
                                   Quaternion([0, 0, 1, 0]))
     self.assertQuaternionAreEqual(self.root.world_quaternion,
                                   Quaternion([0, 0, 1, 0]))
Example #27
 def test_rotate_z_3(self):
     self.root.rotate_z(720)
     self.assertQuaternionAreEqual(self.root.local_quaternion,
                                   Quaternion([0, 0, 0, 1]))
Example #28
 def test_rotate_y_2(self):
     self.root.rotate_y(360)
     self.assertQuaternionAreEqual(self.root.local_quaternion,
                                   Quaternion([0, 0, 0, 1]))
Example #29
 def test_quaternion_4(self):
     self.root.world_quaternion = [1, 0, 0, 0]
     self.child_2.local_quaternion = [1, 0, 0, 0]
     self.assertQuaternionAreEqual(self.child_2.world_quaternion,
                                   Quaternion([0, 0, 0, 1]))
Example #30
def predict_eval_and_save_external(steps, hparams, sync_replicas, input_fn, external_options):

    object_to_test = external_options["obj_name"]
    out_folder = external_options["out_folder"]
    ckpt_path = external_options["ckpt_path"]
    dataset_folder = external_options["dataset_folder"]
    mesh_folder = external_options["mesh_folder"]

    run_config = tf.estimator.RunConfig(
        keep_checkpoint_every_n_hours=0.5,
        save_checkpoints_secs=180,
        save_summary_steps=50)

    estimator = tf.estimator.Estimator(
        model_fn=estimator_model_fn,
        params=hparams, config=run_config)

    print("loading model: ", ckpt_path)

    input_fn = input_fn(dataset=object_to_test, folder_tfrecord=dataset_folder, mesh_folder=mesh_folder, batch_size=1)
    predictions = estimator.predict(input_fn=input_fn, checkpoint_path=ckpt_path)

    pose_pred = []
    pose_gt = []

    if not os.path.exists(out_folder):
        os.mkdir(out_folder)
    out_folder_object = os.path.join(out_folder, object_to_test)
    if not os.path.exists(out_folder_object):
        os.mkdir(out_folder_object)

    it_img = 0

    for item in predictions:

        pos_gt = item["pos_gt"]
        quat_gt = item["quat_gt"]

        pos_init = item["pos_init"]
        quat_init = item["quat_init"]

        xy_max_ind = item["xy_out"]
        z_max_att = item["z_out"]

        xy_image_delta = xy_max_ind[:2]
        xy_image = item["im_center"] + xy_image_delta / item["image_scaling"]
        xy_image = np.squeeze(xy_image)

        z_model = pos_init[2]

        cam_mat = item["cam_mat"]

        x_model_cam_frame = (xy_image[0] - cam_mat[0, 2]) / cam_mat[0, 0]
        x_model_cam_frame = x_model_cam_frame * z_model
        y_model_cam_frame = (xy_image[1] - cam_mat[1, 2]) / cam_mat[1, 1]
        y_model_cam_frame = y_model_cam_frame * z_model

        pose_model_lateral = np.array([x_model_cam_frame, y_model_cam_frame, z_model])

        pos_max_att = np.exp(z_max_att) * pose_model_lateral

        quat_init_q = Quaternion(quat_init)
        R_init = np.array(quat_init_q.matrix33)
        quat_out_q = Quaternion(item["quat_out"])
        R_out = np.array(quat_out_q.matrix33)

        pose_pred_it = np.zeros((3, 4))
        pose_pred_it[:, 3] = pos_max_att
        R_pred = np.matmul(R_out, R_init)
        pose_pred_it[:3, :3] = R_pred
        pose_pred.append(pose_pred_it)

        pose_gt_it = np.zeros((3, 4))
        pose_gt_it[:, 3] = pos_gt
        quat_gt = Quaternion(quat_gt)
        R_gt = np.array(quat_gt.matrix33)
        pose_gt_it[:3, :3] = R_gt
        pose_gt.append(pose_gt_it)

        if it_img % 100 == 0:
            print(it_img)

        object_to_test = "%06d" % it_img
        out_file = os.path.join(out_folder_object, object_to_test + "_predict.npy")
        out_dict = {
            'pose_pred': pose_pred_it,
            'pose_gt': pose_gt_it,
        }
        np.save(out_file, out_dict)

        it_img += 1