Example 1
    def load_relative(self, img_set, img_num):
        # load and format the images, depending on number of channels
        img_type = 'RGB' if self.input_channels == 6 else 'L'

        img_1 = load_image(self.images[img_set][img_num],
                           type=img_type,
                           resolution=self.input_resolution)
        img_2 = load_image(self.images[img_set][img_num + 1],
                           type=img_type,
                           resolution=self.input_resolution)

        if self.input_channels == 2:
            # 2 channels
            #    0 L = img_1 grayscale
            #    1 A = img_2 grayscale
            img = Image.merge("LA", (img_1, img_2))
        elif self.input_channels == 3:
            # 3 channels
            #    0 R = img_1 grayscale
            #    1 G = img_2 grayscale
            #    2 B = zero
            img = Image.merge("RGB",
                              (img_1, img_2, Image.new('L', img_1.size, 0)))
        elif self.input_channels == 6:
            # 6 channels
            #    0 = img_1 R
            #    1 = img_1 G
            #    2 = img_1 B
            #    3 = img_2 R
            #    4 = img_2 G
            #    5 = img_2 B
            img = np.concatenate((np.asarray(img_1), np.asarray(img_2)),
                                 axis=2)
        else:
            raise ValueError('invalid input_channels {:d}'.format(
                self.input_channels))

        # apply image transform
        if self.transform is not None:
            img = self.transform(img)
        else:
            # without a transform, img is assumed to be a numpy HWC array
            img = torch.from_numpy(img.transpose((2, 0, 1)))

        # calc pose deltas:
        #  - global translation (if not predicting orientations)
        #  - relative translation/rotation (if predicting orientations)
        pose_1 = self.poses[img_set][img_num]
        pose_2 = self.poses[img_set][img_num + 1]

        translation = [pose_2[n] - pose_1[n]
                       for n in range(3)]  # global translation

        if self.predict_orientations:
            quat_1 = Quaternion(pose_1[6], pose_1[3], pose_1[4], pose_1[5])
            quat_2 = Quaternion(pose_2[6], pose_2[3], pose_2[4], pose_2[5])

            quat_delta = quat_1.inverse * quat_2  # relative rotation
            translation = quat_1.rotate(translation)  # relative translation
            pose_delta = translation + [
                quat_delta.x, quat_delta.y, quat_delta.z, quat_delta.w
            ]

            if not quat_delta.norm:
                print('warning:  zero quaternion, relative rotation ({:d})'.
                      format(img_num))
        else:
            pose_delta = translation

        # scale/normalize output
        if self.scale_outputs:
            pose_delta = scale(pose_delta, self.output_range)

        if self.normalize_outputs:
            pose_delta = normalize_std(pose_delta, self.output_mean,
                                       self.output_std)

        #print('img_num {:04d}  {:s}'.format(img_num, str(pose_delta)))
        return img, torch.Tensor(pose_delta)
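
A minimal standalone sketch of the relative-pose arithmetic used above, assuming pyquaternion and poses stored as [x, y, z, qx, qy, qz, qw]; the values here are hypothetical:

import numpy as np
from pyquaternion import Quaternion

pose_1 = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0]               # identity orientation
pose_2 = [1.0, 2.0, 0.0, 0.0, 0.0, 0.7071068, 0.7071068]   # 90 deg about z

quat_1 = Quaternion(pose_1[6], pose_1[3], pose_1[4], pose_1[5])  # (w, x, y, z)
quat_2 = Quaternion(pose_2[6], pose_2[3], pose_2[4], pose_2[5])

quat_delta = quat_1.inverse * quat_2                        # relative rotation
translation = quat_1.rotate([pose_2[n] - pose_1[n] for n in range(3)])
print(translation, quat_delta)
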
Example 2
class BoatData:
    qx_180 = Quaternion(axis=[1, 0, 0], angle=np.pi)
    qz_90p = Quaternion(axis=[0, 0, 1], angle=np.pi / 2)
    q_rot = qz_90p * qx_180

    def __init__(self):
        self.position = [0.0, 0.0, 0.0]
        self.velocity = [0.0, 0.0, 0.0]
        self.acceleration = [0.0, 0.0, 0.0]
        self.velocity_rot = [0.0, 0.0, 0.0]
        self.acceleration_rot = [0.0, 0.0, 0.0]
        self.bodyAxis = [0.0, 0.0, 0.0]
        #self.RotMatrix = RPYToRot(0.0, 0.0, 0.0)
        self.quat_orientation = Quaternion(w=1.0, x=0.0, y=0.0, z=0.0)
        self.euler = [0.0, 0.0, 0.0]
        self.angular_velo = [0.0, 0.0, 0.0]
        self.angular_velo_rot = [0.0, 0.0, 0.0]
        # 5-sample windows; despite the names, they feed a moving average below
        self.accelMedianX = np.zeros(5)
        self.accelMedianY = np.zeros(5)
        self.accelMedianZ = np.zeros(5)
        self.veloMedianX = np.zeros(5)
        self.veloMedianY = np.zeros(5)
        self.veloMedianZ = np.zeros(5)
        self.filteredAccelValue = [0.0, 0.0, 0.0]
        self.filteredVeloValue = [0.0, 0.0, 0.0]

        self.camera_position = [0.0, 0.0, 0.0]
        self.camera_velocity = [0.0, 0.0, 0.0]
        self.camera_orientation = Quaternion(w=1.0, x=0.0, y=0.0, z=0.0)
        self.euler_camera = [0.0, 0.0, 0.0]



        # Publishers
        self.position_publish = rospy.Publisher('/mavros/local_position/pose_NED2', PoseStamped, queue_size=1)
        self.velocity_publish = rospy.Publisher('/mavros/local_position/velocity_localNED2', TwistStamped, queue_size=1)
        self.accel_publish = rospy.Publisher('/mavros/imu/data_NED2', Imu, queue_size=1)
        self.angular_velocity_publish = rospy.Publisher('/mavros/local_position/velocity_bodyNED2', TwistStamped, queue_size=1)
        self.axis_publish = rospy.Publisher('/hippocampus/currentaxis', HippocampusCurrentaxis, queue_size=1)
        #self.current_axis_pub = rospy.Publisher('/hippocampus/current_axis', HippocampusControl,
        #                                        queue_size=1)

        # Subscribers
        rospy.Subscriber("/mavros/local_position/pose", PoseStamped, self.poseCallback)
        rospy.Subscriber("/mavros/local_position/velocity_body", TwistStamped, self.angularVeloCallback)
        rospy.Subscriber("/mavros/local_position/velocity_local", TwistStamped, self.veloCallback)
        rospy.Subscriber("/mavros/imu/data", Imu, self.imuCallback)

        rospy.Subscriber("/mavros/local_position/pose_NED", PoseStamped, self.cameraPoseCallback)
        rospy.Subscriber("/estimated_twist", TwistStamped, self.cameraVeloCallback)

    def poseCallback(self, msg):
        #print("POSE CALLBACK", msg.pose.position.x)
        #print("POSE CALLBACK2", self.position)
        tmpQuat = Quaternion(w=msg.pose.orientation.w,
                             x=msg.pose.orientation.x,
                             y=msg.pose.orientation.y,
                             z=msg.pose.orientation.z)

        orientation_quat_ned = self.q_rot * (tmpQuat * self.qx_180)
        euler_orientation = euler_from_quaternion(
            np.array([orientation_quat_ned.x, orientation_quat_ned.y,
                      orientation_quat_ned.z, orientation_quat_ned.w]))
        self.quat_orientation = orientation_quat_ned  # orientation in NED as quaternion

        self.euler = euler_orientation  # orientation in NED as Euler angles
        # Generate random errors if desired (the trailing * 0 disables them)
        randx = random.uniform(-1., 1.) * 0.1 * 0
        randy = random.uniform(-1., 1.) * 0.1 * 0
        randz = random.uniform(-1., 1.) * 0.1 * 0
        randdistance = np.linalg.norm(np.array([randx, randy, randz]))
        #print("RandomErrors :", randx, randy, randz, randdistance)


        self.position = self.q_rot.rotate(np.array([msg.pose.position.x + randx,
                                                    msg.pose.position.y + randy,
                                                    msg.pose.position.z + randz]))


        #Publish Transformed Position and Orientation to ROS
        NED = PoseStamped()
        NED.header = msg.header  #TIMESTAMP?
        NED.header.frame_id = 'global_tank'
        NED.pose.position.x = self.position[0]
        NED.pose.position.y = self.position[1]
        NED.pose.position.z = self.position[2]
        current_axis = self.getBodyAxisQuat()
        NED.pose.orientation.w = self.quat_orientation.w
        NED.pose.orientation.x = self.quat_orientation.x
        NED.pose.orientation.y = self.quat_orientation.y
        NED.pose.orientation.z = self.quat_orientation.z
        currAxis = HippocampusCurrentaxis()
        currAxis.frame_stamp = rospy.Time.now()
        currAxis.axis_x = current_axis[0]
        currAxis.axis_y = current_axis[1]
        currAxis.axis_z = current_axis[2]
        self.axis_publish.publish(currAxis)
        self.position_publish.publish(NED)
        #current_axis = self.normalize(self.quat_orientation.rotate(np.array([1, 0, 0])))
        #hcc = HippocampusControl()
        #hcc.frame_stamp = rospy.Time.now()

        #hcc.thrust = 0.0
        #hcc.roll_effort = current_axis[0]
        #hcc.pitch_effort =current_axis[1]
        #hcc.yaw_effort = current_axis[2]
        #self.current_axis_pub.publish(hcc)

    def veloCallback(self,velo):
        self.velocity[0] = velo.twist.linear.x
        self.velocity[1] = velo.twist.linear.y
        self.velocity[2] = velo.twist.linear.z
        randx = random.uniform(-1., 1.) * 0.1 * 0
        randy = random.uniform(-1., 1.) * 0.1 * 0
        randz = random.uniform(-1., 1.) * 0.1 * 0
        randdistance = np.linalg.norm(np.array([randx, randy, randz]))
        #print("RandomErrors Velo:", randx, randy, randz, randdistance)
        self.velocity_rot = self.q_rot.rotate(np.array([velo.twist.linear.x+randx, velo.twist.linear.y+randy, velo.twist.linear.z+randz]))
        #print("Velocity", self.velocity)
        self.veloMedianX = np.roll(self.veloMedianX, 1)
        self.veloMedianY = np.roll(self.veloMedianY, 1)
        self.veloMedianZ = np.roll(self.veloMedianZ, 1)

        # the newest sample always goes into slot 0 of each window
        self.veloMedianX[0] = self.velocity_rot[0]
        self.veloMedianY[0] = self.velocity_rot[1]
        self.veloMedianZ[0] = self.velocity_rot[2]

        self.filteredVeloValue[0] = np.mean(self.veloMedianX)
        self.filteredVeloValue[1] = np.mean(self.veloMedianY)
        self.filteredVeloValue[2] = np.mean(self.veloMedianZ)

        veloNED = TwistStamped()
        veloNED.header = velo.header  #TIMESTAMP?
        veloNED.header.frame_id = 'global_tank'
        veloNED.twist.linear.x = self.filteredVeloValue[0]
        veloNED.twist.linear.y = self.filteredVeloValue[1]
        veloNED.twist.linear.z = self.filteredVeloValue[2]
        self.velocity_publish.publish(veloNED)

    def imuCallback(self, imu):
        self.acceleration[0] = imu.linear_acceleration.x
        self.acceleration[1] = imu.linear_acceleration.y
        self.acceleration[2] = imu.linear_acceleration.z

        #print("Accel", self.acceleration)
        accel_temp = np.array([-imu.linear_acceleration.x, imu.linear_acceleration.y, imu.linear_acceleration.z])
        #print("AccelRaw",accel_temp)
        accel_temp_trans = self.getQuaternionOrientation().rotate(accel_temp)
        accel_temp_trans[0] = -accel_temp_trans[0]
        accel_temp_trans[1] = -accel_temp_trans[1]
        accel_temp_trans[2] = -(accel_temp_trans[2] - 9.81)  # remove gravity, flip to NED
        self.acceleration_rot = accel_temp_trans

        self.accelMedianX = np.roll(self.accelMedianX, 1)
        self.accelMedianY = np.roll(self.accelMedianY, 1)
        self.accelMedianZ = np.roll(self.accelMedianZ, 1)

        # the newest sample always goes into slot 0 of each window
        self.accelMedianX[0] = accel_temp_trans[0]
        self.accelMedianY[0] = accel_temp_trans[1]
        self.accelMedianZ[0] = accel_temp_trans[2]

        self.filteredAccelValue[0] = np.mean(self.accelMedianX)
        self.filteredAccelValue[1] = np.mean(self.accelMedianY)
        self.filteredAccelValue[2] = np.mean(self.accelMedianZ)
        #print(imu.linear_acceleration.x)
        #print("X", self.accelMedianX)
        #print("X Filtered", self.filteredAccelValue[0])
        accelNED = Imu()
        accelNED.header = imu.header
        accelNED.linear_acceleration.x = self.filteredAccelValue[0]
        accelNED.linear_acceleration.y = self.filteredAccelValue[1]
        accelNED.linear_acceleration.z = self.filteredAccelValue[2]
        self.accel_publish.publish(accelNED)

    def angularVeloCallback(self, ang_velo):
        self.angular_velo[0] = ang_velo.twist.angular.x
        self.angular_velo[1] = ang_velo.twist.angular.y
        self.angular_velo[2] = ang_velo.twist.angular.z
        tmp_angular_velo_rot = np.array([ang_velo.twist.angular.x, ang_velo.twist.angular.y, ang_velo.twist.angular.z])
        self.angular_velo_rot = self.qx_180.rotate(tmp_angular_velo_rot)
        ang_veloNED = TwistStamped()
        ang_veloNED.header = ang_velo.header  # TIMESTAMP?
        ang_veloNED.header.frame_id = 'global_tank'
        ang_veloNED.twist.angular.x = self.angular_velo_rot[0]
        ang_veloNED.twist.angular.y = self.angular_velo_rot[1]
        ang_veloNED.twist.angular.z = self.angular_velo_rot[2]
        self.angular_velocity_publish.publish(ang_veloNED)

    def cameraPoseCallback(self, camera_pose):
        self.camera_position[0] = camera_pose.pose.position.x
        self.camera_position[1] = camera_pose.pose.position.y
        self.camera_position[2] = camera_pose.pose.position.z
        self.camera_orientation = Quaternion(w=camera_pose.pose.orientation.w,
                                             x=camera_pose.pose.orientation.x,
                                             y=camera_pose.pose.orientation.y,
                                             z=camera_pose.pose.orientation.z)
        self.euler_camera = euler_from_quaternion(np.array([camera_pose.pose.orientation.x,camera_pose.pose.orientation.y,
                                                            camera_pose.pose.orientation.z,camera_pose.pose.orientation.w]))

    def cameraVeloCallback(self, camera_velocity):
        self.camera_velocity[0] = camera_velocity.twist.linear.x
        self.camera_velocity[1] = camera_velocity.twist.linear.y
        self.camera_velocity[2] = camera_velocity.twist.linear.z

        
    def normalize(self, v):
        norm = np.linalg.norm(v)
        if norm == 0:
            return v

        return v / norm

    def getFilteredVelo(self):
        return self.filteredVeloValue

    def getFilteredAccel(self):
        return self.filteredAccelValue

    def getPosition(self):
        return self.position

    def getVelocity(self):
        return self.velocity

    def getAcceleration(self):
        return self.acceleration

    def getVelocityRotated(self):
        return self.velocity_rot

    def getAccelerationRotated(self):
        return self.acceleration_rot

    def getAngularVelocityRotated(self):
        return self.angular_velo_rot

    def getQuaternionOrientation(self):
        return self.quat_orientation

    def getQuaternionOrientationCamera(self):
        return self.camera_orientation

    def getEulerOrientation(self):
        return self.euler

    def getEulerOrientationCamera(self):
        return self.euler_camera

    def getBodyAxisQuat(self):
        return self.normalize(self.quat_orientation.rotate(np.array([1, 0, 0])))

    def getBodyAxisQuatCamera(self):
        return self.normalize(self.camera_orientation.rotate(np.array([1, 0, 0])))

    def getPositionCamera(self):
        return self.camera_position

    def getVelocityCamera(self):
        return self.camera_velocity

    def sampled_kvectors_spherical_coordinates(self, NA_in, NA_out, NumSample,
                                               kd):
        # Sample multiple planewaves at different angles to simulate a focused beam.
        # Returns a list of planewave direction vectors Kd
        # (and the corresponding scale factors when using uniform sampling).
        # NA_in / NA_out: inner and outer numerical aperture of the lens
        #                 (only NA_out is used below)
        # NumSample: number of samples (planewaves)
        # kd: center planewave of the focused beam

        # allocate space for the field and initialize it to zero
        start3 = time.time()
        CenterKd = self.k  # default planewave coming in perpendicular to the surface
        kd = kd / np.linalg.norm(kd)  # normalize the new planewave
        r = np.sqrt(
            CenterKd[0]**2 + CenterKd[1]**2 + CenterKd[2]**2
        )  # radius of the hemisphere the k vectors are sampled from

        if (
                kd[0] == CenterKd[0] and kd[1] == CenterKd[1]
                and kd[2] == CenterKd[2]
        ):  # if the new planewave points in the same direction as the default one
            rotateAxis = CenterKd  # use the default k vector as rotation axis
            RoAngle = 0  # rotation angle is 0 degrees
        else:  # otherwise a rotation is needed
            rotateAxis = np.cross(
                CenterKd, kd
            )  # an axis perpendicular to both vectors serves as rotation axis
            RoAngle = math.asin(kd[2] / r)  # calculate the rotation angle
        beamRotate = Quaternion(
            axis=rotateAxis, angle=RoAngle)  # quaternion performing the rotation

        Kd = np.zeros((3, NumSample))  # initialize the planewave list
        #        scaleFactor = np.zeros(NumSample)                   # scale factors used to attenuate planewave amplitudes along the latitude domain

        # convert the axis from Cartesian to spherical
        pha = math.acos(CenterKd[2] / r)  # polar angle pha from the Z coordinate

        phaM = math.asin(NA_out / np.real(
            self.n))  # sample range of pha from the numerical aperture

        inZ = np.cos(pha)  # lower boundary of sampling along the Z axis
        outZ = np.cos(phaM)  # upper boundary of sampling along the Z axis

        rangeZ = np.abs(inZ) - np.abs(outZ)  # sampling range along the Z axis

        #        phaStep = phaM / NumSample                                  #set longitudinal sample resolution as maximal pha divided by number of samples
        #        thetaStep = thetaM / NumSample                              #set latitudinal sample resolution as maximal theta divided by number of samples

        ###following is uniform sampling
        #        for i in range(NumSample):                                  #sample along longitudinal (pha) domain
        #            for j in range(NumSample):                              #sample along latitudinal (theta) domain
        #                KdR = r                                             #sample hemisphere radiance will be all the same as r
        #                KdTheta = theta + thetaStep * j                     #sample theta at each step in the sample range
        #                KdPha = pha + phaStep * i                           #sample pha at each step in the sample range
        #                Kd[0,j,i] = KdR * np.cos(KdTheta) * np.sin(KdPha)   #convert coordinates from spherical to Cartesian
        #                Kd[1,j,i] = KdR * np.sin(KdTheta) * np.sin(KdPha)
        #                Kd[2,j,i] = KdR * np.cos(KdPha)
        #                Kd[:,j,i] = beamRotate.rotate(Kd[:,j,i])            #rotate k vectors by the quaternion generated
        #                scaleFactor[j,i] = np.sin(KdPha)                    #calculate the scalefactors by the current polar angle pha
        #

        ### Monte Carlo sampling
        for i in range(NumSample):
            KdR = r  # the r coordinate of the vector in the spherical system
            KdTheta = random.random() * 2 * np.pi  # random theta coordinate in the spherical system
            KdZ = random.random() * rangeZ + inZ  # random Z coordinate in the Cartesian system
            KdPha = math.acos(KdZ)  # convert it back to the spherical system
            # Z is sampled in Cartesian coordinates so the vectors distribute
            # uniformly along that direction; sampling pha directly would make
            # them denser towards the center.
            Kd[0, i] = KdR * np.cos(KdTheta) * np.sin(KdPha)  # spherical to Cartesian
            Kd[1, i] = KdR * np.sin(KdTheta) * np.sin(KdPha)
            Kd[2, i] = KdR * np.cos(KdPha)
            Kd[:, i] = beamRotate.rotate(Kd[:, i])  # rotate k vectors by the quaternion


#        Kd = np.reshape(Kd, (3, NumSample ** 2))
#        scaleFactor = np.reshape(scaleFactor, (NumSample ** 2))   # reshape k vectors and scale factors to a one-dimensional list

        end3 = time.time()
        print("sample planewaves: " + str(end3 - start3) + " s\n")

        return Kd
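
The q_rot composition above is the ENU-to-NED frame change applied throughout the class; a minimal standalone check (a sketch, assuming only pyquaternion):

import numpy as np
from pyquaternion import Quaternion

qx_180 = Quaternion(axis=[1, 0, 0], angle=np.pi)
qz_90p = Quaternion(axis=[0, 0, 1], angle=np.pi / 2)
q_rot = qz_90p * qx_180

# An ENU-style position maps to NED: x and y swap, z flips sign.
print(q_rot.rotate(np.array([1.0, 2.0, 3.0])))  # approx. [2., 1., -3.]
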
Example 4
class Pose:
    """SE(3) rigid transform class that allows compounding of 6-DOF poses
    and provides common transformations that are commonly seen in geometric problems.
    """
    def __init__(self,
                 wxyz=np.float64([1, 0, 0, 0]),
                 tvec=np.float64([0, 0, 0])):
        """Initialize a Pose with Quaternion and 3D Position

        Parameters
        ----------
        wxyz: np.float64 or Quaternion or torch.FloatTensor, (default: np.float64([1,0,0,0]))
            Quaternion/Rotation (wxyz)

        tvec: np.float64 or torch.FloatTensor, (default: np.float64([0,0,0]))
            Translation (xyz)
        """
        if isinstance(wxyz, torch.FloatTensor):
            wxyz = wxyz.numpy()
        if isinstance(tvec, torch.FloatTensor):
            tvec = tvec.numpy()

        assert isinstance(wxyz, (np.ndarray, Quaternion))
        assert isinstance(tvec, np.ndarray)

        self.quat = Quaternion(wxyz)
        self.tvec = tvec

    def __repr__(self):
        formatter = {'float_kind': lambda x: '%.2f' % x}
        tvec_str = np.array2string(self.tvec, formatter=formatter)
        return 'wxyz: {}, tvec: ({})'.format(self.quat, tvec_str)

    def copy(self):
        """Return a copy of this pose object.

        Returns
        ----------
        result: Pose
            Copied pose object.
        """
        return self.__class__(Quaternion(self.quat), self.tvec.copy())

    def __mul__(self, other):
        """Left-multiply Pose with another Pose or 3D-Points.

        Parameters
        ----------
        other: Pose or np.ndarray
            1. Pose: Identical to oplus operation.
               (i.e. self_pose * other_pose)
            2. ndarray: transform [N x 3] point set
               (i.e. X' = self_pose * X)

        Returns
        ----------
        result: Pose or np.ndarray
            Transformed pose or point cloud
        """
        if isinstance(other, Pose):
            assert isinstance(other, self.__class__)
            t = self.quat.rotate(other.tvec) + self.tvec
            q = self.quat * other.quat
            return self.__class__(q, t)
        elif isinstance(other, BoundingBox3D):
            return BoundingBox3D(self * other.pose, other.sizes)
        else:
            assert other.shape[-1] == 3, 'Point cloud is not 3-dimensional'
            X = np.hstack([other, np.ones((len(other), 1))]).T
            return (np.dot(self.matrix, X).T)[:, :3]

    def __rmul__(self, other):
        raise NotImplementedError('Right multiply not implemented yet!')

    def inverse(self):
        """Returns a new Pose that corresponds to the
        inverse of this one.

        Returns
        ----------
        result: Pose
            Inverted pose
        """
        qinv = self.quat.inverse
        return self.__class__(qinv, qinv.rotate(-self.tvec))

    @property
    def matrix(self):
        """Returns a 4x4 homogeneous matrix of the form [R t; 0 1]

        Returns
        ----------
        result: np.ndarray
            4x4 homogeneous matrix
        """
        result = self.quat.transformation_matrix
        result[:3, 3] = self.tvec
        return result

    @property
    def rotation_matrix(self):
        """Returns the 3x3 rotation matrix (R)

        Returns
        ----------
        result: np.ndarray
            3x3 rotation matrix
        """
        result = self.quat.transformation_matrix
        return result[:3, :3]

    @property
    def rotation(self):
        """Return the rotation component of the pose as a Quaternion object.

        Returns
        ----------
        self.quat: Quaternion
            Rotation component of the Pose object.
        """
        return self.quat

    @property
    def translation(self):
        """Return the translation component of the pose as a np.ndarray.

        Returns
        ----------
        self.tvec: np.ndarray
            Translation component of the Pose object.
        """
        return self.tvec

    @classmethod
    def from_matrix(cls, transformation_matrix):
        """Initialize pose from 4x4 transformation matrix

        Parameters
        ----------
        transformation_matrix: np.ndarray
            4x4 containing rotation/translation

        Returns
        -------
        Pose
        """
        return cls(wxyz=Quaternion(matrix=transformation_matrix[:3, :3]),
                   tvec=np.float64(transformation_matrix[:3, 3]))

    @classmethod
    def from_pose_proto(cls, pose_proto):
        """Initialize pose from 4x4 transformation matrix

        Parameters
        ----------
        pose_proto: Pose_pb2
            Pose as defined in proto/geometry.proto

        Returns
        -------
        Pose
        """

        rotation = np.float64([
            pose_proto.rotation.qw,
            pose_proto.rotation.qx,
            pose_proto.rotation.qy,
            pose_proto.rotation.qz,
        ])

        translation = np.float64([
            pose_proto.translation.x,
            pose_proto.translation.y,
            pose_proto.translation.z,
        ])
        return cls(wxyz=rotation, tvec=translation)

    def to_proto(self):
        """Convert Pose into pb object.

        Returns
        -------
        pose_0S: Pose_pb2
            Pose as defined in proto/geometry.proto
        """
        pose_0S = geometry_pb2.Pose()
        pose_0S.rotation.qw = self.quat.elements[0]
        pose_0S.rotation.qx = self.quat.elements[1]
        pose_0S.rotation.qy = self.quat.elements[2]
        pose_0S.rotation.qz = self.quat.elements[3]

        pose_0S.translation.x = self.tvec[0]
        pose_0S.translation.y = self.tvec[1]
        pose_0S.translation.z = self.tvec[2]
        return pose_0S

    def __eq__(self, other):
        return self.quat == other.quat and (self.tvec == other.tvec).all()
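
A brief usage sketch for this Pose class (hypothetical values; assumes the class and pyquaternion are importable):

import numpy as np

p_ab = Pose(wxyz=np.float64([0.7071068, 0.7071068, 0.0, 0.0]),  # 90 deg about x
            tvec=np.float64([1.0, 0.0, 0.0]))
p_bc = Pose(tvec=np.float64([0.0, 1.0, 0.0]))

p_ac = p_ab * p_bc                      # compound transforms (oplus)
identity = p_ac * p_ac.inverse()        # approximately the identity pose
points = np.array([[0.0, 0.0, 0.0], [1.0, 1.0, 1.0]])
print(p_ac * points)                    # transform an [N x 3] point set
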
Example 5
    def sampled_kvectors_spherical_coordinates(self, NA, NumSample, kd):
        # Sample multiple planewaves at different angles to simulate a focused beam.
        # Returns a list of planewave direction vectors Kd
        # and the corresponding scale factors.
        # NA: numerical aperture of the lens the planewaves are focused from
        # NumSample: number of samples (planewaves) along the longitudinal and latitudinal axes
        # kd: center planewave of the focused beam

        # allocate space for the field and initialize it to zero
        start3 = time.time()
        CenterKd = [
            0, 0, -1
        ]  # default planewave coming in perpendicular to the surface
        kd = kd / np.linalg.norm(kd)  # normalize the new planewave
        r = np.sqrt(
            CenterKd[0]**2 + CenterKd[1]**2 + CenterKd[2]**2
        )  # radius of the hemisphere the k vectors are sampled from

        if (
                kd[0] == CenterKd[0] and kd[1] == CenterKd[1]
                and kd[2] == CenterKd[2]
        ):  # if the new planewave points in the same direction as the default one
            rotateAxis = CenterKd  # use the default k vector as rotation axis
            RoAngle = 0  # rotation angle is 0 degrees
        else:  # otherwise a rotation is needed
            rotateAxis = np.cross(
                CenterKd, kd
            )  # an axis perpendicular to both vectors serves as rotation axis
            RoAngle = math.asin(kd[2] / r)  # calculate the rotation angle
        beamRotate = Quaternion(
            axis=rotateAxis, angle=RoAngle)  # quaternion performing the rotation

        Kd = np.zeros(
            (3, NumSample, NumSample))  # initialize the planewave list
        scaleFactor = np.zeros(
            (NumSample, NumSample)
        )  # scale factors used to attenuate planewave amplitudes along the latitude domain

        # convert the axis from Cartesian to spherical
        if CenterKd[0] == 0:  # the default planewave lies along the Z axis
            theta = 0  # set azimuthal angle theta to 0
        else:
            theta = math.atan(
                CenterKd[1] /
                CenterKd[0])  # otherwise calculate theta from the X and Y coordinates

        pha = math.acos(CenterKd[2] / r)  # polar angle pha from the Z coordinate

        phaM = math.asin(
            NA /
            np.real(n))  # sample range of pha from the numerical aperture (n: refractive index, assumed module-level)
        thetaM = 2 * np.pi  # sample range of theta is 2*pi
        phaStep = phaM / NumSample  # longitudinal sample resolution: maximal pha divided by the number of samples
        thetaStep = thetaM / NumSample  # latitudinal sample resolution: maximal theta divided by the number of samples
        for i in range(NumSample):  # sample along the longitudinal (pha) domain
            for j in range(
                    NumSample):  # sample along the latitudinal (theta) domain
                KdR = r  # the sampled hemisphere radius is always r
                KdTheta = theta + thetaStep * j  # sample theta at each step in the sample range
                KdPha = pha + phaStep * i  # sample pha at each step in the sample range
                Kd[0, j, i] = KdR * np.cos(KdTheta) * np.sin(
                    KdPha)  # convert coordinates from spherical to Cartesian
                Kd[1, j, i] = KdR * np.sin(KdTheta) * np.sin(KdPha)
                Kd[2, j, i] = KdR * np.cos(KdPha)
                Kd[:, j, i] = beamRotate.rotate(
                    Kd[:, j, i])  # rotate k vectors by the quaternion
                scaleFactor[j, i] = np.sin(
                    KdPha
                )  # scale factor from the current polar angle pha

        Kd = np.reshape(Kd, (3, NumSample**2))
        scaleFactor = np.reshape(
            scaleFactor, (NumSample**2)
        )  # reshape k vectors and scale factors to one-dimensional lists

        end3 = time.time()
        print("sample planewaves: " + str(end3 - start3) + " s\n")

        return Kd, scaleFactor
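
The grid above samples pha uniformly, which over-represents the cap's center; that is what the sin(pha) scaleFactor compensates for. A standalone sketch of the alternative (sampling cos(pha) uniformly, as the Monte Carlo variant in Example 2 does):

import numpy as np

rng = np.random.default_rng(0)
num = 10000
pha_max = np.pi / 6                        # cap half-angle

# Uniform in z = cos(pha) gives directions uniform on the spherical cap,
# so no sin(pha) weighting is needed afterwards.
z = rng.uniform(np.cos(pha_max), 1.0, num)
theta = rng.uniform(0.0, 2 * np.pi, num)
pha = np.arccos(z)
kd = np.stack([np.cos(theta) * np.sin(pha),
               np.sin(theta) * np.sin(pha),
               np.cos(pha)])
print(kd.shape)  # (3, 10000)
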
Example 6
    def set_force_signal(self, target_pose):
        force_signal = self.desired_force_torque.copy()
        # np.roll rearranges the stored quaternion components into the
        # [w, x, y, z] order pyquaternion expects
        target_q = Quaternion(np.roll(target_pose[3:], -1))
        force_signal[:3] = target_q.rotate(force_signal[:3])
        self.force_control_model.set_goals(force=force_signal)
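
The np.roll call shifts the quaternion scalar between the first and last position; the shift direction depends on how the pose stores its quaternion (Example 10 below rolls by +1 for the ROS-style [x, y, z, w] case). A small hedged check of that case:

import numpy as np
from pyquaternion import Quaternion

xyzw = np.array([0.0, 0.0, 0.7071068, 0.7071068])  # 90 deg about z, ROS order
q = Quaternion(np.roll(xyzw, 1))                   # -> [w, x, y, z]
print(q.rotate([1.0, 0.0, 0.0]))                   # approx. [0., 1., 0.]
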
Example 7
import numpy as np
from pyquaternion import Quaternion
import xml.etree.ElementTree as ET

#root = ET.parse("/root/Desktop/studying/flyBattle/code/data.xml").getroot()
#rocket = root.find('Rocket')
#d = rocket.find('x')
#d = float(d.text)
#print((d))
#a = d + 60
#print(a)
#itemlist = xmldoc.getElementsByTagName('item')
#print(len(itemlist))
#print(itemlist[0].attributes['name'].value)

x = 5
y = 5
z = 5
v = np.array([1, 1, 1])

q1 = Quaternion(axis=[0, 0, 1], angle=np.arctan2(y, x))  # yaw toward (x, y)
qv = q1.rotate([0, 1, 0])  # y axis after the yaw rotation
q2 = Quaternion(axis=qv, angle=np.arctan2(z, np.sqrt(x**2 + y**2)))  # pitch by the elevation angle
q3 = q2.conjugate * q1  # combined rotation

print(q3.rotate(v))
#print(qv)
Example 8
class Pose(object):
    """
    Pose defined by three dimensional translation and rotation

    The translation is representated by a three dimensional
    row vector. The rotation is represented by a
    quaternion. The pose itself is defined in coordinate system
    which is defined in frame_id (default world)

    Methods
    -------
    from_transformation_matrix(matrix, frame_id='world', normalize_rotation=False)
        Creates an instance of Pose from a transformation matrix
    from_posestamped_msg(msg)
        Initialize Pose from ROS posestamped message
    from_pose_msg(msg, frame_id='')
        Initialize Pose from ROS pose message
    normalize_rotation()
        Creates an instance of Pose with normalized quaternion
    is_rotation_normalized()
        Return True if quaternion is normalized
    rotation_matrix()
        Returns the rotation matrix (3x3 numpy array)
    transformation_matrix()
        Returns the transformation matrix in homogeneous coordinates (4x4 numpy array)
    inverse(new_frame_id)
        Creates an instance which contains the inverse of this pose
    translate(translation)
        Translate pose
    rotate(rotation)
        Rotate pose
    to_posestamped_msg()
        Creates an instance of the ROS message PoseStamped from Pose
    """
    def __init__(self,
                 translation,
                 rotation,
                 frame_id='world',
                 normalize_rotation=False):
        """
        Initialization of the pose class

        For the internal quaternion representation and also to initialize the
        the class the library pyquaternion is used :
        https://kieranwynn.github.io/pyquaternion/

        Parameters
        ----------
        translation : convertable to numpy array of shape (3,)
            translation or position the pose describes
        rotation : pyquaternion.Quaternion
            rotation the pose describes as Quaternion
        frame_id : str (optinal defaul = 'world')
            name of the coordinate system the pose is defined
        normalize_rotation : bool (optinal default = False)
            If true quaternion normalization is performed in the
            initialization process

        Returns
        ------
        pose : Pose
            An instance of class Pose

        Raises
        ------
        TypeError : type mismatch
            If tranlation vector could not converted to a numpy 
            array of shape (3,) with d type floating and the 
            quaternion is not of type Quaternion.
            If frame_id is not of type str
            If normalize_rotation is not of type bool
        ValueError
            If the quaternion initalization from 
            translation matrix went wrong

        Examples
        --------
        Create a Pose by translation and quaternion and
        perform normalization
        >>> quaternion = Quaternion(matrix=np.eye(3))
        >>> translation = np.array([1.0,1.0,1.0])
        >>> p1 = Pose(translation, quaternion, _, True)
        """

        # translation
        try:
            self.__translation = np.fromiter(translation, float)
            if self.__translation.shape[0] != 3:
                raise ValueError('provided translation is not'
                                 ' convertible to a numpy vector of size (3,)')
        except (TypeError, ValueError) as exc:
            raise exc

        # rotation
        if isinstance(rotation, Quaternion):
            self.__quaternion = Quaternion(rotation.q.copy())

        else:
            raise TypeError('rotation is not of the expected'
                            ' type Quaternion')

        if not isinstance(frame_id, str):
            raise TypeError('frame_id is not of expected type str')

        self.__frame_id = frame_id

        if not isinstance(normalize_rotation, bool):
            raise TypeError('normalize_rotation is not of expected type bool')

        if normalize_rotation is True:
            self._normalize_rotation()

        self.__translation.flags.writeable = False
        self.__quaternion.q.flags.writeable = False

    @classmethod
    def identity(cls, frame_id='world'):
        """
        Creates a instance of Pose from idenity matrix

        Parameters
        ----------
        frame_id : str (optional defaul = 'world')
            name of the coordinate system the pose is defined

        Returns
        ------
        pose : Pose
            An instance of class Pose
        """

        translation = [0.0, 0.0, 0.0]
        quaternion = Quaternion([1.0, 0.0, 0.0, 0.0])

        return cls(translation, quaternion, frame_id)

    @classmethod
    def from_transformation_matrix(cls,
                                   matrix,
                                   frame_id='world',
                                   normalize_rotation=False):
        """
        Initialization of the pose class from transformation matrix

        Parameters
        ----------
        matrix : numpy.ndarray((4,4),np.dtype=floating)
            A transformation matrix in homogenous coordinates
        frame_id : str (defaul = 'world')
            name of the coordinate system the pose is defined
        normalize_rotation : bool (optional default = False)
            If true quaternion normalization is performed in the
            initialization process

        Returns
        ------
        pose : Pose
            An instance of class Pose

        Raises
        ------
        TypeError : type mismatch
            If tranformation matrix is not a 4x4 numpy array 
            with dtype floating

        Examples
        --------
        Create a Pose instance from transformation matrix and
        set frame_id
        >>> transformation_matrix = np.eye(4)
        >>> p0 = Pose.from_transformation_matrix(transformation_matrix, "global")
        """
        if isinstance(matrix, np.ndarray):
            if (len(matrix.shape) != 2 or matrix.shape[0] != 4
                    or matrix.shape[1] != 4):
                raise ValueError('matrix is not a 4x4 numpy array')
            if not issubclass(matrix.dtype.type, np.floating):
                raise TypeError('matrix dtype is not a floating type')

            transformation_matrix = matrix.copy()
            translation = transformation_matrix[:-1, 3]
            try:
                quaternion = Quaternion(matrix=transformation_matrix)
            except ValueError as exc:
                raise ValueError(
                    'quaternion initialization went wrong') from exc

        else:
            raise TypeError('matrix is not of type numpy array')

        return cls(translation, quaternion, frame_id, normalize_rotation)

    @classmethod
    def from_posestamped_msg(cls, msg):
        """
        Initialize Pose from ROS posestamped message

        Parameters
        ----------
        msg : PoseStamped from ROS geometry_msgs
            posestamped message 

        Returns
        -------
        pose : Pose
            Instance of Pose generated from PoseStamped message

        Raises
        ------
        TypeError
            If msg is not of type PoseStamped
        """
        if not isinstance(msg, geometry_msgs.PoseStamped):
            raise TypeError('msg is not of expected type PoseStamped')

        translation = np.fromiter(
            [msg.pose.position.x, msg.pose.position.y, msg.pose.position.z],
            float)

        quaternion = Quaternion([
            msg.pose.orientation.w, msg.pose.orientation.x,
            msg.pose.orientation.y, msg.pose.orientation.z
        ])

        frame_id = msg.header.frame_id

        if not frame_id:
            frame_id = 'world'

        return cls(translation, quaternion, frame_id)

    @classmethod
    def from_pose_msg(cls, msg, frame_id='world'):
        """
        Initialize Pose from ros geometry_msgs/Pose

        Parameters
        ----------
        msg : Pose from geometry_msgs
            pose message

        Returns
        -------
        pose : Pose
            Instance of Pose from pose message

        Raises
        ------
        TypeError
            If msg is not of type Pose
        """

        if not isinstance(msg, geometry_msgs.Pose):
            raise TypeError('msg is not of type ros geometry_msgs/Pose')

        translation = np.fromiter(
            [msg.position.x, msg.position.y, msg.position.z], float)

        quaternion = Quaternion([
            msg.orientation.w, msg.orientation.x, msg.orientation.y,
            msg.orientation.z
        ])

        return cls(translation, quaternion, frame_id)

    @property
    def frame_id(self):
        """
        frame_id : str (readonly)
            Id of the coordinate system / frame
        """
        return self.__frame_id

    @property
    def translation(self):
        """
        translation : numpy.array((3,) dtype=floating) (readonly)
            numpy row array of size 3 which describes the translation
        """
        return self.__translation

    @property
    def quaternion(self):
        """
        quaternion: Quaternion(pyquaternion lib) (readonly)
            Quaternion which describes the rotation
        """
        return self.__quaternion

    def normalize_rotation(self):
        """
        Creates an instance of Pose with normalized quaternion

        Returns
        ------
        pose : Pose
            Instance of Pose with normalized quaternion / rotation
        """
        if np.isclose(np.linalg.norm(self.__quaternion.elements), 1.0):
            return self
        else:
            return Pose(self.__translation,
                        self.__quaternion,
                        normalize_rotation=True)

    def _normalize_rotation(self):
        self.__quaternion._fast_normalise()
        if self.__quaternion[0] < 0:
            self.__quaternion = -self.__quaternion

    def is_rotation_normalized(self):
        """
        Returns True is rotation is normalized

        Returns
        -------
        result : bool
            True is rotation is normalized else False
        """

        return bool(np.isclose(np.linalg.norm(self.__quaternion.elements),
                               1.0))

    def rotation_matrix(self):
        """
        Returns the rotation matrix (3x3 numpy array)

        Returns
        ------
        rotation_matrix : np.ndarray
            A 3x3 numpy array with dtype float which represents the rotation
            matrix
        """

        return self.__quaternion.rotation_matrix

    def transformation_matrix(self):
        """
        Return the transformation matrix in homogeneous coordinates (4x4 numpy array)

        Returns
        ------
        transformation_matrix : np.ndarray
            A 4x4 numpy array with dtype float which represents the transformation
            matrix in homogeneous coordinates
        """
        transformation_matrix = self.__quaternion.transformation_matrix
        transformation_matrix[:-1, -1] = self.__translation
        return transformation_matrix

    def inverse(self, new_frame_id):
        """
        Creates an instance which contains the inverse of this pose

        Parameters
        ----------
        new_frame_id : str
            name of the coordinate system in which the pose is now defined

        Returns
        -------
        inv_pose : Pose
            pose which is the inverse of self

        Raises
        ------
        TypeError : type mismatch
            If new_frame_id is not of type str
        """
        q_inv = self.__quaternion.inverse
        t_inv = q_inv.rotate(-self.translation)
        return self.__class__(t_inv, q_inv, new_frame_id)

    def translate(self, translation):
        """
        Translate pose

        Parameters
        ----------
        translation : Iterable
            translation which defines the change in x,y,z

        Returns
        -------
        translated_pose : Pose
            pose which is translated by translate vector

        Raises
        ------
        TypeError : type mismatch
            If translation is not convertible to np.ndarray
        ValueError
            If translation is not of size 3 (x,y,z)
        """

        try:
            translation = np.fromiter(translation, float)
            if translation.shape[0] != 3:
                raise ValueError('provided translation is not'
                                 ' convertible to a numpy vector of size (3,)')
        except (TypeError, ValueError) as exc:
            raise exc

        new_t = self.__translation + translation
        return type(self)(new_t, self.__quaternion, self.__frame_id)

    def rotate(self, rotation):
        """
        Rotate pose

        Parameters
        ----------
        rotation: (Quaternion or np.ndarray)
            defines rotation by Quaternion or 3x3 rotation matrix

        Returns
        -------
        rotated_pose : Pose
            pose which is rotated by rotation

        Raises
        ------
        TypeError : type mismatch
            If rotation is not one of expected types
            3x3 np.ndarray or pyquaternion.Quaternion
        ValueError
            If initialization of Quaternion is not possible
        """
        is_quaternion = isinstance(rotation, Quaternion)
        is_rotation_matrix = (isinstance(rotation, np.ndarray)
                              and rotation.shape == (3, 3))

        if is_quaternion:
            new_q = self.__quaternion * rotation
        elif is_rotation_matrix:
            try:
                new_q = self.__quaternion * Quaternion(matrix=rotation)
            except ValueError as exc:
                raise ValueError(
                    'quaternion initialization went wrong') from exc
        else:
            raise TypeError('rotation is not one of expected types '
                            'Quaternion or np.ndarray (3x3)')

        return type(self)(self.__translation, new_q, self.__frame_id)

    def to_posestamped_msg(self):
        """
        Creates an instance of the ROS message PoseStamped

        Returns
        ------
            Instance of ROS message PoseStamped (seq and time are not set)
            docs.ros.org/kinetic/api/geometry_msgs/html/msg/PoseStamped.html

        """

        pose_stamped = geometry_msgs.PoseStamped()
        pose_stamped.header.frame_id = self.__frame_id

        pose_stamped.pose.position.x = self.__translation[0]
        pose_stamped.pose.position.y = self.__translation[1]
        pose_stamped.pose.position.z = self.__translation[2]

        pose_stamped.pose.orientation.w = self.__quaternion[0]
        pose_stamped.pose.orientation.x = self.__quaternion[1]
        pose_stamped.pose.orientation.y = self.__quaternion[2]
        pose_stamped.pose.orientation.z = self.__quaternion[3]

        return pose_stamped

    def to_pose_msg(self):
        """
        Creates an instance of the ROS message Pose

        Returns
        ------
            Instance of ROS message Pose
            geometry_msgs/Pose

        """

        pose = geometry_msgs.Pose()

        pose.position.x = self.__translation[0]
        pose.position.y = self.__translation[1]
        pose.position.z = self.__translation[2]

        pose.orientation.w = self.__quaternion[0]
        pose.orientation.x = self.__quaternion[1]
        pose.orientation.y = self.__quaternion[2]
        pose.orientation.z = self.__quaternion[3]

        return pose

    def __str__(self):
        axes = tuple(['w', 'x', 'y', 'z'])

        translation_str = '\n'.join([
            'translation.' + axes[i + 1] + ' : ' + str(value)
            for i, value in enumerate(self.__translation)
        ])
        quaternion_str = '\n'.join([
            'quaternion.' + axes[i] + ' : ' + str(value)
            for i, value in enumerate(self.__quaternion)
        ])

        return ('Pose:\n' + translation_str + '\n' + quaternion_str +
                '\nframe_id : ' + self.__frame_id)

    def __repr__(self):
        return self.__str__()

    def __eq__(self, other):
        r_tol = 1.0e-6
        a_tol = 1.0e-7

        if not isinstance(other, self.__class__):
            return False

        if id(other) == id(self):
            return True

        if not np.allclose(
                self.__translation, other.translation, rtol=r_tol, atol=a_tol):
            return False

        if self.__quaternion != other.quaternion:
            return False

        return True

    def __ne__(self, other):
        return not self.__eq__(other)

    def __mul__(self, other):

        if isinstance(other, self.__class__):
            new_q = self.__quaternion * other.quaternion
            new_t = (self.__translation +
                     self.__quaternion.rotate(other.translation))
            return self.__class__(new_t, new_q, self.frame_id)
        elif (isinstance(other, np.ndarray)
              and issubclass(other.dtype.type, np.floating)):
            if other.shape == (3, ):
                new_t = (self.__translation + self.__quaternion.rotate(other))
                return new_t
            elif other.shape == (3, 1):
                new_t = (self.__translation + self.__quaternion.rotate(other))
                return np.expand_dims(new_t, axis=1)
            elif other.shape == (4, ):
                new_t = np.ones(other.shape)
                new_t[0:3] = (self.__translation * other[-1] +
                              self.__quaternion.rotate(other[:3]))
                return new_t
            elif other.shape == (4, 1):
                new_t = np.ones(other.shape)
                t3 = (self.__translation * other[-1] +
                      self.__quaternion.rotate(other[:3]))
                new_t[0:3] = np.expand_dims(t3, axis=1)
                return new_t
            elif other.shape == (4, 4):
                product = np.matmul(self.transformation_matrix(), other)
                return self.from_transformation_matrix(product,
                                                       self.__frame_id)
            else:
                raise TypeError('vector is not of shape (3,), (3,1),'
                                ' (4,), (4,1) or matrix (4,4)')

        else:
            raise TypeError('other is not of expected type Pose,'
                            ' 3 or 4 dimensional numpy vector or'
                            ' 4x4 transformation matrix dtype floating')

    def __rmul__(self, other):
        return np.dot(other, self.transformation_matrix())

    __array_priority__ = 10000
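
A short usage sketch for this Pose class (hypothetical values; assumes the class and pyquaternion are importable):

import numpy as np
from pyquaternion import Quaternion

p0 = Pose.from_transformation_matrix(np.eye(4), 'world')
p1 = Pose([0.1, 0.0, 0.0], Quaternion(axis=[0, 0, 1], angle=np.pi / 2))
p2 = p0 * p1                            # compound two poses
p_inv = p2.inverse('tool')              # inverse with a new frame label
print(p2 * np.array([1.0, 0.0, 0.0]))   # rotate+translate a 3-vector
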
Example 9
def main():

    global publish_magnetic_data
    global show_magnetic_field

    # load json and create model
    json_file = open(
        '/home/letrend/workspace/roboy3/src/ball_in_socket_estimator/python/' +
        model_name + '.json', 'r')
    loaded_model_json = json_file.read()
    json_file.close()
    model = model_from_json(loaded_model_json)
    # load weights into new model
    model.load_weights(
        "/home/letrend/workspace/roboy3/src/ball_in_socket_estimator/python/" +
        model_name + ".h5")  #_checkpoint
    print("Loaded model from disk")
    model.summary()

    rospy.init_node('replay')
    listener = tf.TransformListener()
    broadcaster = tf.TransformBroadcaster()

    magneticSensor_pub = rospy.Publisher('roboy/middleware/MagneticSensor',
                                         MagneticSensor,
                                         queue_size=1)
    joint_state_pub = rospy.Publisher('external_joint_states',
                                      sensor_msgs.msg.JointState,
                                      queue_size=1)
    joint_targets_pub = rospy.Publisher('joint_targets',
                                        sensor_msgs.msg.JointState,
                                        queue_size=1)
    visualization_pub = rospy.Publisher('visualization_marker',
                                        visualization_msgs.msg.Marker,
                                        queue_size=100)
    rospy.loginfo('loading data')
    dataset = pandas.read_csv("/home/letrend/workspace/roboy3/" + data_name +
                              "_data0.log",
                              delim_whitespace=True,
                              header=1)
    dataset = dataset.values[1:len(dataset) - 1, 0:]
    print('%d values' % (len(dataset)))
    dataset = dataset[abs(dataset[:, 12]) <= 0.7, :]
    dataset = dataset[abs(dataset[:, 13]) <= 0.7, :]
    dataset = dataset[abs(dataset[:, 14]) <= 1.5, :]
    euler_set = np.array(dataset[:, 12:15])
    # mean_euler = euler_set.mean(axis=0)
    # std_euler = euler_set.std(axis=0)
    # euler_set = (euler_set - mean_euler) / std_euler
    print('max euler ' + str(np.amax(euler_set)))
    print('min euler ' + str(np.amin(euler_set)))
    sensors_set = np.array([
        dataset[:, 0], dataset[:, 1], dataset[:, 2], dataset[:, 3],
        dataset[:, 4], dataset[:, 5], dataset[:, 6], dataset[:, 7],
        dataset[:, 8], dataset[:, 9], dataset[:, 10], dataset[:, 11]
    ])
    sensors_set = np.transpose(sensors_set)
    # mean_sensor = sensors_set.mean(axis=0)
    # std_sensor = sensors_set.std(axis=0)
    # sensors_set = (sensors_set - mean_sensor) / std_sensor
    # sensors_set = wrangle.mean_zero(pandas.DataFrame(sensors_set)).values
    sample = 0
    samples = len(euler_set)
    t = 0
    rate = rospy.Rate(1)
    error = 0
    stride = 1
    print('model predicts')
    euler_predict = model.predict(sensors_set)
    # euclidean norm of the residuals per sample (a rough proxy for the MSE)
    mse = numpy.linalg.norm(euler_predict - euler_set) / len(euler_predict)
    print('mse: ' + str(mse))
    msg = sensor_msgs.msg.JointState()
    msg.header = std_msgs.msg.Header()
    msg.name = ['head_axis0', 'head_axis1', 'head_axis2']
    msg.velocity = [0, 0, 0]
    msg.effort = [0, 0, 0]

    for i in range(0, len(euler_predict), stride):
        if rospy.is_shutdown():
            return
        # rospy.loginfo_throttle(1, (euler_predict[i,0],euler_predict[i,1],euler_predict[i,2]))
        # error = error+numpy.linalg.norm(euler_predict[i,i:i+stride]-euler_set[i,i:i+stride])
        # euler_p = euler_predict[i,:]*std_euler+mean_euler
        euler_predictED = [
            euler_predict[i, 0], euler_predict[i, 1], euler_predict[i, 2]
        ]
        euler_truth = euler_set[i, :]
        broadcaster.sendTransform(
            (0, 0, 0),
            euler_to_quaternion(
                euler_predictED[0], euler_predictED[1],
                euler_predictED[2]),  #euler_p[0],euler_p[1],euler_p[2]),
            rospy.Time.now(),
            "predict",
            "world")
        broadcaster.sendTransform(
            (0, 0, 0),
            euler_to_quaternion(euler_truth[0],
                                euler_truth[1], euler_truth[2]),
            rospy.Time.now(), "ground_truth", "world")
        msg.header.stamp = rospy.Time.now()
        msg.position = euler_predictED
        joint_state_pub.publish(msg)
        msg.position = euler_truth
        joint_targets_pub.publish(msg)

        if show_magnetic_field:

            msg2 = visualization_msgs.msg.Marker()
            msg2.type = msg2.SPHERE
            msg2.id = i
            msg2.color.r = 1
            msg2.color.a = 1
            msg2.action = msg2.ADD
            msg2.header.seq = i
            msg2.header.frame_id = "world"
            msg2.header.stamp = rospy.Time.now()
            msg2.lifetime = rospy.Duration(0)
            msg2.scale.x = 0.01
            msg2.scale.y = 0.01
            msg2.scale.z = 0.01
            msg2.pose.orientation.w = 1
            msg2.ns = "magnetic_field_sensor0"

            # NOTE: the original snippet referenced undefined names `s` and `q`;
            # here we assume the sensor vector is the current sample and the
            # rotation is the ground-truth orientation of this frame.
            sens = numpy.array(sensors_set[i, 3:6])
            q_w = Quaternion(np.roll(euler_to_quaternion(euler_truth[0],
                                                         euler_truth[1],
                                                         euler_truth[2]), 1))
            sens_w = q_w.rotate(sens) * 0.01
            msg2.pose.position.x = sens_w[0]
            msg2.pose.position.y = sens_w[1]
            msg2.pose.position.z = sens_w[2]

            visualization_pub.publish(msg2)
        if publish_magnetic_data:
            # use a separate message object so the JointState msg above
            # is not clobbered for the next iteration
            msg_ms = MagneticSensor()
            msg_ms.id = 5
            msg_ms.sensor_id = [0, 1, 2]
            msg_ms.x = [sensors_set[i, 0], sensors_set[i, 3]]  #,sensors_set[i,6]
            msg_ms.y = [sensors_set[i, 1], sensors_set[i, 4]]  #,sensors_set[i,7]
            msg_ms.z = [sensors_set[i, 2], sensors_set[i, 5]]  #,sensors_set[i,8]
            magneticSensor_pub.publish(msg_ms)
            t0 = rospy.Time.now()
        frame_error = numpy.linalg.norm(euler_truth -
                                        euler_predictED) * 180 / math.pi
        print("%d/%d\t\t%.3f%% error %f\n"
              "truth     : %.3f %.3f %.3f\n"
              "predicted : %.3f %.3f %.3f" %
              (t, samples, (t / float(samples)) * 100.0, frame_error,
               euler_truth[0], euler_truth[1], euler_truth[2],
               euler_predictED[0], euler_predictED[1], euler_predictED[2]))
        t = t + stride
        if frame_error > 1:
            rate.sleep()
        error = error + frame_error
    error = error / samples
    print("mean angular error: %f deg" % (error))
    # Signal handler
    rospy.spin()
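
The script assumes a helper euler_to_quaternion compatible with tf's (x, y, z, w) tuple order; a plausible ZYX (roll-pitch-yaw) sketch, not taken from the original source:

import math

def euler_to_quaternion(roll, pitch, yaw):
    # ZYX convention; returns (x, y, z, w) as tf's sendTransform expects.
    cr, sr = math.cos(roll / 2), math.sin(roll / 2)
    cp, sp = math.cos(pitch / 2), math.sin(pitch / 2)
    cy, sy = math.cos(yaw / 2), math.sin(yaw / 2)
    return (sr * cp * cy - cr * sp * sy,
            cr * sp * cy + sr * cp * sy,
            cr * cp * sy - sr * sp * cy,
            cr * cp * cy + sr * sp * sy)
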
Example 10
# The snippet starts mid-function; the imports and the `plot` signature below
# are reconstructed from the call sites (get_conical_helix_trajectory is
# assumed to be defined elsewhere in the project).
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # noqa: F401 (enables the 3d projection)
from pyquaternion import Quaternion


def plot(ax, vec, marker='o', label=None):
    ax.plot([vec[0]], [vec[1]], [vec[2]], marker=marker, label=label)


target = [-0.00319718, -0.37576394,  0.45137436, -0.4986701138808597, 0.49256585342539383, 0.5200487598517108, -0.4881150324852347]  # overridden below
target = [-0.00319718, -0.37576394,  0.45137436, -0.0021967960033876664, -0.6684438218043409, 0.7437414872148852, -0.0051605594964759876]

init = [-0.02702883, -0.35275209,  0.46842613,  -0.0021968 , -0.66844382,  0.74374149, -0.00516056] 
steps = 300

init_pose = init[:3]
final_pose = target[:3]

target_q = Quaternion(np.roll(target[3:], 1))  # [x, y, z, w] -> [w, x, y, z]

p2 = np.zeros(3)
p1 = target_q.rotate(np.array(init_pose) - final_pose)

traj = get_conical_helix_trajectory(p1, p2, steps)
traj2 = get_conical_helix_trajectory(init_pose, final_pose, steps)

traj = np.apply_along_axis(target_q.rotate, 1, traj)
traj = traj + final_pose

fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
col = np.arange(steps)
# ax.scatter(traj2[:,0], traj2[:,1], traj2[:,2], s=15,c=col, marker='.')
ax.scatter(traj[:,0], traj[:,1], traj[:,2], s=15,c=col, marker='.')
# plot(ax, p1, label='p1')
# plot(ax, p2, label='p2')
plot(ax, init_pose, label='initial')
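A note on the ordering above: pyquaternion's Quaternion expects (w, x, y, z), while the pose arrays here store (x, y, z, w), so np.roll(..., 1) shifts the scalar part to the front. A standalone sketch with made-up values:

import numpy as np
from pyquaternion import Quaternion

xyzw = np.array([0.0, 0.0, 0.2588190, 0.9659258])  # 30 degree yaw, stored (x, y, z, w)
q = Quaternion(np.roll(xyzw, 1))                   # reordered to (w, x, y, z)
print(q.rotate([1.0, 0.0, 0.0]))                   # ~[0.866, 0.5, 0.0]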
Example 11
    def transformToEarthFrame(self, vector, q_):
        """
        Convert coordinates w.r.t. the drone -> global ("headless")
        """
        q = Quaternion(q_)
        return q.rotate(vector)
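A usage sketch with a made-up orientation (q_ is assumed to be the drone's attitude in the world frame, in (w, x, y, z) order):

from pyquaternion import Quaternion

q_ = (0.7071068, 0.0, 0.0, 0.7071068)   # 90 degree yaw about z
body_velocity = [1.0, 0.0, 0.0]
world_velocity = Quaternion(q_).rotate(body_velocity)
print(world_velocity)                    # ~[0.0, 1.0, 0.0]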
Example 12
class BBox3D:
    """
    Class for 3D Bounding Boxes (3-orthotope).
    """
    def __init__(self,
                 x,
                 y,
                 z,
                 length=1,
                 width=1,
                 height=1,
                 rw=1,
                 rx=0,
                 ry=0,
                 rz=0,
                 q=None,
                 euler_angles=None,
                 is_center=True):
        """
        Constructor for 3D bounding box (3-orthotope). It takes either the
        center of the 3D bounding box or the back-bottom-left corner, the
        width, height and length of the box, and quaternion values to
        indicate the rotation.

        Parameters
        ----------
        x : float
            X axis coordinate of 3D bounding box. Can be either center of bounding box or back-bottom-left corner.
        y : float
            Y axis coordinate of 3D bounding box. Can be either center of bounding box or back-bottom-left corner.
        z : float
            Z axis coordinate of 3D bounding box. Can be either center of bounding box or back-bottom-left corner.
        length : float, optional
            The length of the box (the default is 1).
        width : float, optional
            The width of the box (the default is 1).
        height : float, optional
            The height of the box (the default is 1).
        rw : float, optional
            The real part of the rotation quaternion (the default is 1).
        rx : int, optional
            The first element of the quaternion vector (the default is 0).
        ry : int, optional
            The second element of the quaternion vector (the default is 0).
        rz : int, optional
            The third element of the quaternion vector (the default is 0).
        euler_angles : list or ndarray of float, optional
            Sequence of euler angles in `[x, y, z]` rotation order (the default is None).
        is_center : bool, optional
            Flag to indicate if the provided coordinate is the center of the box (the default is True).

        """
        if is_center:
            self._cx, self._cy, self._cz = x, y, z
            self._c = np.array([x, y, z])
        else:
            self._cx = x + length / 2
            self._cy = y + width / 2
            self._cz = z + height / 2
            self._c = np.array([self._cx, self._cy, self._cz])

        self._w, self._h, self._l = width, height, length

        if euler_angles is not None:
            # we need to apply y, z and x rotations in order
            # http://www.euclideanspace.com/maths/geometry/rotations/euler/index.htm
            self._q = Quaternion(axis=[0, 1, 0], angle=euler_angles[1]) * \
                Quaternion(axis=[0, 0, 1], angle=euler_angles[2]) * \
                Quaternion(axis=[1, 0, 0], angle=euler_angles[0])

        elif q is not None:
            self._q = Quaternion(q)
        else:
            self._q = Quaternion(rw, rx, ry, rz)

    @property
    def center(self):
        """
        Attribute to access center coordinates of box.

        Returns
        -------
        ndarray of float
            3-dimensional vector representing (x, y, z) coordinates of the box.
        """
        return self._c

    @center.setter
    def center(self, c):
        """
        Setter for center coordinates of the box.

        Parameters
        ----------
        c : list or ndarray of float
            Center coordinates in (x, y, z) format.
        Raises
        ------
        ValueError
            If `c` is not a vector/list of length 3.

        """
        if len(c) != 3:
            raise ValueError("Center coordinates should be a vector of size 3")
        self._c = c

    def __valid_scalar(self, x):
        if not np.isscalar(x):
            raise ValueError("Value should be a scalar")
        else:  # x is a scalar so we check for numeric type
            if not isinstance(x, (float, int)):
                raise TypeError("Value needs to be either a float or an int")
        return x

    @property
    def cx(self):
        """
        Get the X coordinate of center.

        Returns
        -------
        float
            X coordinate of center
        """
        return self._cx

    @cx.setter
    def cx(self, x):
        """Setter for X coordinate of center.

        Parameters
        ----------
        x : float
        """
        self._cx = self.__valid_scalar(x)

    @property
    def cy(self):
        """
        Get the Y coordinate of center.

        Returns
        -------
        float
            Y coordinate of center
        """
        return self._cy

    @cy.setter
    def cy(self, x):
        self._cy = self.__valid_scalar(x)

    @property
    def cz(self):
        """
        Get the Z coordinate of center.

        Returns
        -------
        float
            Z coordinate of center
        """
        return self._cz

    @cz.setter
    def cz(self, x):
        self._cz = self.__valid_scalar(x)

    @property
    def q(self):
        """
        Get the rotation quaternion.

        Returns
        -------
        ndarray of float
            Quaternion values in (w, x, y, z) form.
        """
        return np.hstack((self._q.real, self._q.imaginary))

    @q.setter
    def q(self, q):
        if not isinstance(q, (list, tuple, np.ndarray, Quaternion)):
            raise TypeError(
                "Value should be either a list, tuple, numpy array or Quaternion")
        if isinstance(q, (list, tuple, np.ndarray)) and len(q) != 4:
            raise ValueError("Quaternion input should be a vector of size 4")

        self._q = Quaternion(q)

    @property
    def quaternion(self):
        """
        Get the rotation quaternion.

        Returns
        -------
        ndarray of float
            Quaternion values in (w, x, y, z) form.
        """
        return self.q

    @quaternion.setter
    def quaternion(self, q):
        self.q = q

    @property
    def l(self):
        """
        Get the length of the box.

        Returns
        -------
        float
            Length of the box.
        """
        return self._l

    @l.setter
    def l(self, x):
        self._l = self.__valid_scalar(x)

    @property
    def length(self):
        """
        Get the length of the box.

        Returns
        -------
        float
            Length of the box.
        """
        return self._l

    @length.setter
    def length(self, x):
        self.l = x

    @property
    def w(self):
        """
        Get the width of the box.

        Returns
        -------
        float
            Width of the box.
        """
        return self._w

    @w.setter
    def w(self, x):
        self._w = self.__valid_scalar(x)

    @property
    def width(self):
        """
        Get the width of the box.

        Returns
        -------
        float
            Width of the box.
        """
        return self._w

    @width.setter
    def width(self, x):
        self.w = x

    @property
    def h(self):
        """
        Get the height of the box.

        Returns
        -------
        float
            Height of the box.
        """
        return self._h

    @h.setter
    def h(self, x):
        self._h = self.__valid_scalar(x)

    @property
    def height(self):
        """
        Get the height of the box.

        Returns
        -------
        float
            Height of the box.
        """
        return self._h

    @height.setter
    def height(self, x):
        self.h = x

    def __transform(self, x):
        """Rotate and translate the point to world coordinates"""
        y = self._c + self._q.rotate(x)
        return y

    @property
    def p1(self):
        p = np.array([-self._l / 2, -self._w / 2, -self._h / 2])
        p = self.__transform(p)
        return p

    @property
    def p2(self):
        p = np.array([self._l / 2, -self._w / 2, -self._h / 2])
        p = self.__transform(p)
        return p

    @property
    def p3(self):
        p = np.array([self._l / 2, self._w / 2, -self._h / 2])
        p = self.__transform(p)
        return p

    @property
    def p4(self):
        p = np.array([-self._l / 2, self._w / 2, -self._h / 2])
        p = self.__transform(p)
        return p

    @property
    def p5(self):
        p = np.array([-self._l / 2, -self._w / 2, self._h / 2])
        p = self.__transform(p)
        return p

    @property
    def p6(self):
        p = np.array([self._l / 2, -self._w / 2, self._h / 2])
        p = self.__transform(p)
        return p

    @property
    def p7(self):
        p = np.array([self._l / 2, self._w / 2, self._h / 2])
        p = self.__transform(p)
        return p

    @property
    def p8(self):
        p = np.array([-self._l / 2, self._w / 2, self._h / 2])
        p = self.__transform(p)
        return p

    @property
    def p(self):
        """
        Attribute to access ndarray of all corners of box in order.

        Returns
        -------
        ndarray of float
            All corners of the bounding box in order.
        """
        x = np.vstack([
            self.p1, self.p2, self.p3, self.p4, self.p5, self.p6, self.p7,
            self.p8
        ])
        return x

    def __repr__(self):
        return "BBox3D(x={cx},y={cy},z={cz}), length={l},width={w},height={h}, q=({rw}, {rx}, {ry}, {rz}))".format(
            cx=self._cx,
            cy=self._cy,
            cz=self._cz,
            l=self._l,
            w=self._w,
            h=self._h,
            rw=self._q.real,
            rx=self._q.imaginary[0],
            ry=self._q.imaginary[1],
            rz=self._q.imaginary[2])
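A minimal usage sketch for this class (assuming numpy and pyquaternion are imported as in the snippets above):

box = BBox3D(0.0, 0.0, 0.0, length=2, width=1, height=1,
             euler_angles=[0.0, 0.0, np.pi / 2])
print(box.center)   # [0. 0. 0.]
print(box.q)        # rotation quaternion in (w, x, y, z) form
print(box.p.shape)  # (8, 3): all corners after rotation and translation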
Example 13
class gameObject(object):
    def __init__(self, id_prefix, pos=sc([0, 0, 0]), mass=100000.0):
        global id_count
        if type(pos) == sc:
            self.position = pos
        else:
            self.position = sc(pos)

        self.inventory = items.inventory()
        self.identifier = id_prefix + "-" + "%06i" % id_count
        self.heading = Quaternion()
        id_count += 1
        global objects
        objects.append(self)
        logging.info("New object: " + str(self))

    def __str__(self):
        return self.identifier + ", " + str(self.position)

    def remove(self):
        global objects
        try:
            objects.remove(self)
            logging.info("object removed: " + str(self))
        except ValueError:
            logging.exception("unable to remove self from objects")

    def getPosition(self):
        return self.position.getPosition()

    def getDistanceTo(self, target):
        return abs(target.position - self.position)

    def getAngleTo(self, target, inDeg=False):
        # Move target position to the ship's origin
        lpos = target.position - self.position
        # Calculate the heading vector
        heading_vect = self.heading.rotate([0.0, 0.0, 1.0])

        dot = np.vdot(heading_vect, lpos.getPosition())
        angle = np.arccos(dot / abs(lpos))
        if inDeg:
            angle *= 180 / np.pi

        return angle

    def getSphericalCoordinateTo(self, target, inDeg=False):

        # Move target position to the ship's origin
        lpos = target.position - self.position
        # Move the target into local coordinates
        ltarget = self.heading.rotate(lpos.getPosition())
        x, y, z = ltarget

        distance = abs(lpos)
        inclination = np.arccos(x / distance) - np.pi / 2
        azimuth = np.arctan2(y, z)

        if inDeg:
            inclination *= 180 / np.pi
            azimuth *= 180 / np.pi

        return distance, azimuth, inclination

    def getHeading(self):
        return self.heading.rotate([0.0, 0.0, 1.0])

    def get_mass(self):
        return self.inventory.get_mass()
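The angle computation in getAngleTo can be checked in isolation; a standalone sketch with a made-up heading (heading_vect is already unit length, so only lpos needs normalizing):

import numpy as np
from pyquaternion import Quaternion

heading = Quaternion(axis=[0, 1, 0], angle=np.pi / 4)  # pitched 45 degrees
heading_vect = heading.rotate([0.0, 0.0, 1.0])
lpos = np.array([1.0, 0.0, 1.0])                       # target - self
angle = np.arccos(np.vdot(heading_vect, lpos) / np.linalg.norm(lpos))
print(np.degrees(angle))                               # ~0: target dead ahead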
Example 14
    def load_directory(self, path):
        pose_list_filename = os.path.join(path, "images.txt")

        if not os.path.isfile(pose_list_filename):
            return False

        pose_file = open(pose_list_filename, 'r')

        dir_poses = []
        dir_images = []

        while True:
            pose_line = pose_file.readline()

            if not pose_line:
                break

            if pose_line[0] == '#':
                continue

            #   IMAGE_ID, QW, QX, QY, QZ, TX, TY, TZ, CAMERA_ID, NAME
            pose_tokens = pose_line.rstrip().split(' ')

            if len(pose_tokens) != 10:
                print('invalid pose: ' + pose_line)
                break

            # parse tokens
            image_id = int(pose_tokens[0])
            camera_id = int(pose_tokens[8])
            image_name = pose_tokens[9]

            # https://colmap.github.io/format.html#images-txt
            R = (float(pose_tokens[1]), float(pose_tokens[2]),
                 float(pose_tokens[3]), float(pose_tokens[4]))
            t = (float(pose_tokens[5]), float(pose_tokens[6]),
                 float(pose_tokens[7]))

            # get the camera center from -R^t * T
            q = Quaternion(w=R[0], x=R[1], y=R[2], z=R[3]).inverse
            t = np.negative(q.rotate(np.array([t[0], t[1], t[2]]))).tolist()

            # (this is the same as transposing the quaternion's rotation matrix)
            #m = Quaternion(w=R[0], x=R[1], y=R[2], z=R[3]).rotation_matrix.transpose()
            #t = np.negative(np.dot(m, np.array([t[0], t[1], t[2]]))).tolist()

            # store metadata
            dir_poses.append(t + [q.x, q.y, q.z, q.w])
            dir_images.append(os.path.join(self.root_dir, image_name))

            # skip points2D line
            pose_file.readline()

        pose_file.close()

        self.poses.append(dir_poses)
        self.images.append(dir_images)

        self.num_images += len(dir_images)

        if self.relative_pose:
            self.num_images -= 1

        print('({:s}) found {:04d} images under {:s}'.format(
            self.type, len(dir_images), path))

        return True
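The -R^t * T identity used above can be sanity-checked on its own; a small sketch with a made-up pose:

import numpy as np
from pyquaternion import Quaternion

qw, qx, qy, qz = 0.7071068, 0.0, 0.7071068, 0.0   # example world-to-camera rotation
t = np.array([1.0, 2.0, 3.0])

center = np.negative(Quaternion(w=qw, x=qx, y=qy, z=qz).inverse.rotate(t))

# same result via the transposed rotation matrix
R_T = Quaternion(w=qw, x=qx, y=qy, z=qz).rotation_matrix.transpose()
assert np.allclose(center, np.negative(np.dot(R_T, t)))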
Example 15
def add_pose_estimation(bag):
    v_z = Quaternion(0, 0, 0, 1)
    # orientation of the imu frame
    imu_frame = Quaternion(-0.0198769629216, 0.974536168168, -0.218396560265,
                           -0.0467665023439)
    x_vec = np.array([1, 0, 0])
    time = []
    previous_pcl = pcl.PointCloud()
    current_pcl = pcl.PointCloud()
    est_pose_bool = False
    pose_msg = PoseWithCovarianceStamped()
    pose_msg.header.frame_id = 'base_footprint'
    previous_pose = np.array([])
    previous_ori = []
    x = []
    y = []
    prev_tr = np.array([])
    cov = [0] * 36
    scan_time = []
    ros_time = []
    f_pose = 20  # Hz
    queued_pcl_list = []
    dt = 0
    print('Extracting poses using ICP')
    for topic, msg, t in bag.read_messages(
            topics=['/imu0/data', '/scan_corrected'],
            start_time=rospy.Time(bag.get_start_time()) + rospy.Duration(0)):
        #
        if topic == '/imu0/data':
            q = Quaternion(msg.orientation.w, msg.orientation.x,
                           msg.orientation.y, msg.orientation.z)
            v_z_ = q * imu_frame * v_z * imu_frame.conjugate * q.conjugate
            imu_rot = q
            if np.arccos(v_z_[3]) - math.pi > -.05:
                est_pose_bool = True
            else:
                est_pose_bool = False

        elif topic == "/scan_corrected" and est_pose_bool:
            previous_pcl.from_array(current_pcl.to_array())
            current_pcl.from_array(to_pcl_array(msg))
            if dt != 0:
                if dt > 1:
                    queued_pcl_list = []
                else:
                    queued_pcl_list.append(previous_pcl)
                    if len(queued_pcl_list) > 1:  # queue size = 2, less than 1521*2*3 points
                        queued_pcl_list.pop(0)

                    previous_pcl_array = np.concatenate(
                        ([queue_.to_array() for queue_ in queued_pcl_list]),
                        axis=0)
                    previous_pcl.from_array(previous_pcl_array)

            if not time:
                time.append(msg.header.stamp.to_sec())
                prev_tr = np.zeros((1, 3))
                scan_time.append(msg.header.stamp.to_sec())
                ros_time.append(t.to_sec())
                yaw = [0]
            if previous_pcl.size != 0:
                if len(scan_time) == 1:
                    init_q = get_yaw_quaternion(imu_rot)
                    prev_q = Quaternion(1, 0, 0, 0)
                    q_array = [prev_q]
                icp = current_pcl.make_IterativeClosestPoint()
                # Final = icp.align()
                converged, transf, transf_current_pcl, fitness = icp.icp(
                    current_pcl, previous_pcl)

                # check if the rotation matrix is valid (i.e. det==1)
                if not np.isclose(np.linalg.det(transf[0:3, 0:3]), 1.0):
                    converged = False

                if converged:

                    time.append(msg.header.stamp.to_sec())
                    dt = time[-1] - time[-2]

                    if 0 < dt <= 0.5:  #if more than 10 scans (i.e. .5s) have been dropped, do save the pose (same pose as previous)
                        # get covariance matrix
                        pose_orientation, pose_translation = matrix4_to_rot_trans(
                            imu_rot, transf, mode='2d')
                        #pose_orientation, pose_translation, cov = get_cov(transf, current_pcl, transf_current_pcl)

                        # the speed between two scans should not be greater than 40 km/h
                        if np.linalg.norm(pose_translation) / dt * 3.6 < 40:
                            prev_tr = np.append(
                                prev_tr,
                                [prev_q.rotate(pose_translation) + prev_tr[-1]],
                                axis=0)
                            prev_q = init_q.conjugate * pose_orientation
                            q_array.append(prev_q)

                            scan_time.append(msg.header.stamp.to_sec())
                            ros_time.append(t.to_sec())
                else:
                    dt = 0

    print('%i poses were calculated' % len(scan_time))
    print('Interpolate the poses (orientation, position)')
    interp_time = np.linspace(
        scan_time[0], scan_time[-1],
        int(round((scan_time[-1] - scan_time[0]) * f_pose)))
    tr = np.zeros((interp_time.size, 3))
    interp_ros_time = np.linspace(ros_time[0], ros_time[-1], interp_time.size)
    ori = get_interpolated_quat(interp_time, scan_time, q_array)
    tr[:, 0] = np.interp(interp_time, scan_time, prev_tr[:, 0])
    tr[:, 1] = np.interp(interp_time, scan_time, prev_tr[:, 1])

    print('Change coordinates to footprint_frame')
    # rotate from laser_frame to footprint frame
    laser_rd = -(90 + 9) * math.pi / 180
    X = np.cos(laser_rd) * tr[:, 1] - np.sin(laser_rd) * tr[:, 0]
    Y = np.sin(laser_rd) * tr[:, 1] + np.cos(laser_rd) * tr[:, 0]
    tr[:, 1] = X
    tr[:, 0] = Y
    # rotate the orientation
    '''rot_ = Quaternion(axis=[0, 0, 1], radians=laser_rd)
    ori = [rot_*o for o in ori]'''
    print(ori)
    plt.plot(X, Y)
    plt.axis('equal')
    plt.show()
    print('Writing the poses in the /scan_pose topic')
    write_pose_to_bag(bag, interp_time, interp_ros_time, ori, tr)
    print('Re-indexing the bag')
    for done in bag.reindex():
        pass
    bag.close()
    print('Done')
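The get_yaw_quaternion helper used above is not defined in this excerpt. A minimal sketch, assuming it keeps only the heading (yaw about z) of a full orientation:

import math
from pyquaternion import Quaternion

def get_yaw_quaternion(q):
    # Hypothetical stand-in: extract the yaw angle from a (w, x, y, z)
    # quaternion and rebuild a rotation about the z axis only.
    yaw = math.atan2(2.0 * (q.w * q.z + q.x * q.y),
                     1.0 - 2.0 * (q.y * q.y + q.z * q.z))
    return Quaternion(axis=[0, 0, 1], radians=yaw)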
Example 16
class Em:
    """The atom of the MR simulation"""
    def __init__(self, magnetization, position, gyromagnetic_ratio,
                 shielding_constant, equilibrium_magnetization):
        """Initializes an em
        Params:
            magnetization: (numpy 3-vector) initial magnetization
            position: (numpy 3-vector) intial position
            gyromagnetic_ratio: (float) the gyromagnetic ratio of the nucleus
            shielding_constant: (nonnegative float) shielding constant from chemical environment
            equilibrium_magnetization: (positive float) longitudinal magnetization in thermal equilibrium
        """
        # Check params
        if not (magnetization.dtype == np.float64
                and magnetization.shape == (3, )):
            raise TypeError("magnetization must be a numpy 3-vector")
        if not (position.dtype == np.float64 and position.shape == (3, )):
            raise TypeError("position must be a numpy 3-vector")
        if not isinstance(gyromagnetic_ratio, float):
            raise TypeError("gyromagnetic_ratio must be a float")
        if not (isinstance(shielding_constant, float)
                and shielding_constant >= 0.0):
            raise TypeError("shielding constant must be a nonnegative float")
        if not (isinstance(equilibrium_magnetization, float)
                and equilibrium_magnetization > 0.0):
            raise TypeError(
                "equilibrium_magnetization must be a positive float")
        # Set data attributes
        self.mu = magnetization
        self.r = position
        self.gamma = gyromagnetic_ratio
        self.sigma = shielding_constant
        self.mu0 = equilibrium_magnetization
        self.flip_quaternion = Quaternion(
            1)  # Rotation quaternion for flip from excitation

    def set_shielding_constant(self, new_shielding_constant):
        """Updates shielding constant
        Params:
            new_shielding_constant: (nonnegative float) new shielding constant
        """
        # Check params
        if not (isinstance(new_shielding_constant, float)
                and new_shielding_constant >= 0.0):
            raise TypeError(
                "new_shielding_constant must be a nonnegative float")
        # Update data attribute
        self.sigma = new_shielding_constant

    def diffuse(self, diffusion_coefficients, delta_t):
        """Updates position according to diffusion process
        Params:
            diffusion_coefficients: (numpy 3-vector of nonnegative floats) diffusion coefficients in 3 spatial directions at current position
            delta_t: (positive float) time step
        """
        # Check params
        if not (diffusion_coefficients.shape == (3, )
                and diffusion_coefficients.dtype == np.float64
                and all([item >= 0.0 for item in diffusion_coefficients])):
            raise TypeError(
                "diffusion_coefficients must be a numpy 3-vector of nonnegative floats"
            )
        if not (isinstance(delta_t, float) and delta_t > 0.0):
            raise TypeError("delta_t must be a positive float")
        # Model diffusion
        for ax_no in range(3):
            if (diffusion_coefficients[ax_no] > 0.0):
                self.r[ax_no] = self.r[ax_no] + np.random.normal(
                    0.0, np.sqrt(2 * diffusion_coefficients[ax_no] * delta_t))

    def precess_and_relax(self, T1, T2, Bz, delta_t):
        """Updates magnetization assuming free precession
        Params:
            T1: (positive float) T1 relaxation time
            T2: (positive float) T2 relaxation time
            Bz: (float) longitudinal field
            delta_t: (positive float) time step
        """
        # Check params
        if not (isinstance(T1, float) and T1 > 0.0):
            raise TypeError("T1 must be a positive float")
        if not (isinstance(T2, float) and T2 > 0.0):
            raise TypeError("T2 must be a positive float")
        if not isinstance(Bz, float):
            raise TypeError("Bz must be a float")
        if not (isinstance(delta_t, float) and delta_t > 0.0):
            raise TypeError("delta_t must be a positive float")
        # Rotate magnetization
        Bz = (1 - self.sigma) * Bz  # effective field due to electron shielding
        m = (self.mu[0] + 1j * self.mu[1]) * np.exp(-delta_t / T2 - 1j *
                                                    self.gamma * Bz * delta_t)
        self.mu[0] = m.real
        self.mu[1] = m.imag
        self.mu[2] = self.mu0 + (self.mu[2] - self.mu0) * np.exp(-delta_t / T1)

    def update_flip_quaternion(self, B1x, B1y, Bz, omega_rf, delta_t):
        """Updates flip quaternion assuming excitation
        Params:
            B1x: (float) RF pulse modulation in x direction in rotating frame
            B1y: (float) RF pulse modulation in y direction in rotating frame
            Bz: (float) field in z direction in lab frame
            omega_rf: (positive float) angular carrier frequency of RF pulse
            delta_t: (positive float) time step
        """
        # Check params
        if not isinstance(B1x, float):
            raise TypeError("B1x must be a float")
        if not isinstance(B1y, float):
            raise TypeError("B1y must be a float")
        if not isinstance(Bz, float):
            raise TypeError("Bz must be a float")
        if not (isinstance(omega_rf, float) and omega_rf > 0.0):
            raise TypeError("omega_rf must be a positive float")
        if not (isinstance(delta_t, float) and delta_t > 0.0):
            raise TypeError("delta_t must be a positive float")
        # Compute update to flip quaternion
        Brot = np.array([B1x, B1y, Bz])
        Brot = Brot * (1 - self.sigma)  # effective field due to electron shielding
        Beff = Brot - np.array([0.0, 0.0, omega_rf / self.gamma])  # effective field in rotating frame
        Beff_norm = np.linalg.norm(Beff)
        flip_axis = Beff / Beff_norm
        flip_angle = -self.gamma * Beff_norm * delta_t
        self.flip_quaternion = self.flip_quaternion * Quaternion(
            axis=flip_axis, angle=flip_angle)

    def flip(self, omega_rf, pulse_duration):
        """Updates magnetization at end of excitation pulse and resets flip quaternion
        Params:
            omega_rf: (positive float) angular carrier frequency of RF pulse
            pulse_duration: (positive float) duration of RF pulse
        """
        # Check params
        if not (isinstance(omega_rf, float) and omega_rf > 0.0):
            raise TypeError("omega_rf must be a positive float")
        if not (isinstance(pulse_duration, float) and pulse_duration > 0.0):
            raise TypeError("pulse_duration must be a positive float")
        # Apply flip quaternion and reset
        self.flip_quaternion = self.flip_quaternion * Quaternion(
            axis=[0, 0, 1], angle=-omega_rf * pulse_duration)
        self.mu = self.flip_quaternion.rotate(self.mu)
        self.flip_quaternion = Quaternion(1)
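The flip mechanics reduce to a quaternion rotation of the magnetization vector; a standalone sketch of an ideal 90-degree pulse about x:

import numpy as np
from pyquaternion import Quaternion

mu = np.array([0.0, 0.0, 1.0])                      # equilibrium magnetization
flip = Quaternion(axis=[1, 0, 0], angle=np.pi / 2)  # 90 degree flip
print(flip.rotate(mu))                              # ~[0, -1, 0]: tipped into the transverse plane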
Example 17
class BoundingBox3D:
    """
    Class for 3D Bounding Boxes (3-orthotope).
    It takes either the center of the 3D bounding box or the \
        back-bottom-left corner, the width, height and length \
        of the box, and quaternion values to indicate the rotation.
    Args:
        x (:py:class:`float`): X axis coordinate of 3D bounding box. \
            Can be either center of bounding box or back-bottom-left corner.
        y (:py:class:`float`): Y axis coordinate of 3D bounding box. \
            Can be either center of bounding box or back-bottom-left corner.
        z (:py:class:`float`): Z axis coordinate of 3D bounding box. \
            Can be either center of bounding box or back-bottom-left corner.
        length (:py:class:`float`, optional): The length of the box (default is 1).
            This corresponds to the dimension along the x-axis.
        width (:py:class:`float`, optional): The width of the box (default is 1).
            This corresponds to the dimension along the y-axis.
        height (:py:class:`float`, optional): The height of the box (default is 1).
            This corresponds to the dimension along the z-axis.
        rw (:py:class:`float`, optional): The real part of the rotation quaternion \
            (default is 1).
        rx (:py:class:`int`, optional): The first element of the quaternion vector \
            (default is 0).
        ry (:py:class:`int`, optional): The second element of the quaternion vector \
            (default is 0).
        rz (:py:class:`int`, optional): The third element of the quaternion vector \
            (default is 0).
        euler_angles (:py:class:`list` or :py:class:`ndarray` of float, optional): \
            Sequence of euler angles in `[x, y, z]` rotation order \
            (the default is None).
        is_center (`bool`, optional): Flag to indicate if the provided coordinate \
            is the center of the box (the default is True).
    """
    def __init__(
        self,
        x,
        y,
        z,
        length=1,
        width=1,
        height=1,
        rw=1,
        rx=0,
        ry=0,
        rz=0,
        q=None,
        euler_angles=None,
        is_center=True,
    ):
        if is_center:
            self._cx, self._cy, self._cz = x, y, z
            self._c = np.array([x, y, z])
        else:
            self._cx = x + length / 2
            self._cy = y + width / 2
            self._cz = z + height / 2
            self._c = np.array([self._cx, self._cy, self._cz])

        self._w, self._h, self._l = width, height, length

        if euler_angles is not None:
            # we need to apply y, z and x rotations in order
            # http://www.euclideanspace.com/maths/geometry/rotations/euler/index.htm
            self._q = (Quaternion(axis=[0, 1, 0], angle=euler_angles[1]) *
                       Quaternion(axis=[0, 0, 1], angle=euler_angles[2]) *
                       Quaternion(axis=[1, 0, 0], angle=euler_angles[0]))

        elif q is not None:
            self._q = Quaternion(q)
        else:
            self._q = Quaternion(rw, rx, ry, rz)

    @classmethod
    def from_center_dimension_euler(cls, center, dimension, euler_angles=None):
        """Factory function to create BoundingBox3D from center, dimension, and euler arrays.
        Can pass in either np.arrays or Python lists.

        Args:
            center (list): list of length 3
            dimension (list): list of length 3
            euler_angles (list, optional): list of length 3. Defaults to None.

        Returns:
            BoundingBox3D: a new 3D bounding box object
        """
        x, y, z = center
        length, width, height = dimension
        return BoundingBox3D(
            x=x,
            y=y,
            z=z,
            length=length,
            width=width,
            height=height,
            euler_angles=euler_angles,
        )

    @classmethod
    def from_xyzxyz(cls, xyz1, xyz2):
        x1, y1, z1 = xyz1
        x2, y2, z2 = xyz2
        length, width, height = abs(x2 - x1), abs(y2 - y1), abs(z2 - z1)
        min_x = min(x1, x2)
        min_y = min(y1, y2)
        min_z = min(z1, z2)

        # use true division so fractional centers are preserved
        c_x = min_x + length / 2
        c_y = min_y + width / 2
        c_z = min_z + height / 2

        return BoundingBox3D(x=c_x,
                             y=c_y,
                             z=c_z,
                             length=length,
                             width=width,
                             height=height)

    @property
    def center(self):
        """
        Attribute to access center coordinates of box in (x, y, z) format.
        Can be set to :py:class:`list` or :py:class:`ndarray` of float.
        Returns:
            :py:class:`ndarray` of float: 3-dimensional vector representing \
                (x, y, z) coordinates of the box.
        Raises:
            ValueError: If `c` is not a vector/list of length 3.
        """
        return self._c

    @center.setter
    def center(self, c):
        if len(c) != 3:
            raise ValueError("Center coordinates should be a vector of size 3")
        self._c = c

    def __valid_scalar(self, x):
        if not np.isscalar(x):
            raise ValueError("Value should be a scalar")
        else:  # x is a scalar so we check for numeric type
            if not isinstance(x, (float, int)):
                raise TypeError("Value needs to be either a float or an int")
        return x

    @property
    def cx(self):
        """
        :py:class:`float`: X coordinate of center.
        """
        return self._cx

    @cx.setter
    def cx(self, x):
        self._cx = self.__valid_scalar(x)

    @property
    def cy(self):
        """
        :py:class:`float`: Y coordinate of center.
        """
        return self._cy

    @cy.setter
    def cy(self, x):
        self._cy = self.__valid_scalar(x)

    @property
    def cz(self):
        """
        :py:class:`float`: Z coordinate of center.
        """
        return self._cz

    @cz.setter
    def cz(self, x):
        self._cz = self.__valid_scalar(x)

    @property
    def q(self):
        """
        Syntactic sugar for the rotation quaternion of the box.
        Returns:
            :py:class:`ndarray` of float: Quaternion values in (w, x, y, z) form.
        """
        return np.hstack((self._q.real, self._q.imaginary))

    @q.setter
    def q(self, q):
        if not isinstance(q, (list, tuple, np.ndarray, Quaternion)):
            raise TypeError(
                "Value should be either a list, tuple, numpy array or Quaternion")
        if isinstance(q, (list, tuple, np.ndarray)) and len(q) != 4:
            raise ValueError("Quaternion input should be a vector of size 4")

        self._q = Quaternion(q)

    @property
    def quaternion(self):
        """
        The rotation quaternion.
        Returns:
            :py:class:`ndarray` of float: Quaternion values in (w, x, y, z) form.
        """
        return self.q

    @quaternion.setter
    def quaternion(self, q):
        self.q = q

    @property
    def length(self):
        """
        :py:class:`float`: Length of the box.
        """
        return self._l

    @length.setter
    def length(self, x):
        self._l = self.__valid_scalar(x)

    @property
    def width(self):
        """
        :py:class:`float`: The width of the box.
        """
        return self._w

    @width.setter
    def width(self, x):
        self._w = self.__valid_scalar(x)

    @property
    def height(self):
        """
        :py:class:`float`: The height of the box.
        """
        return self._h

    @height.setter
    def height(self, x):
        self._h = self.__valid_scalar(x)

    def __transform(self, x):
        """
        Rotate and translate the point to world coordinates.
        """
        y = self._c + self._q.rotate(x)
        return y

    @property
    def p1(self):
        """
        :py:class:`float`: Back-left-bottom point.
        """
        p = np.array([-self._l / 2, -self._w / 2, -self._h / 2])
        p = self.__transform(p)
        return p

    @property
    def p2(self):
        """
        :py:class:`float`: Back-right-bottom point.
        """
        p = np.array([-self._l / 2, self._w / 2, -self._h / 2])
        p = self.__transform(p)
        return p

    @property
    def p3(self):
        """
        :py:class:`float`: Front-right-bottom point.
        """
        p = np.array([self._l / 2, self._w / 2, -self._h / 2])
        p = self.__transform(p)
        return p

    @property
    def p4(self):
        """
        :py:class:`float`: Front-left-bottom point.
        """
        p = np.array([self._l / 2, -self._w / 2, -self._h / 2])
        p = self.__transform(p)
        return p

    @property
    def p5(self):
        """
        :py:class:`float`: Back-left-top point.
        """
        p = np.array([-self._l / 2, -self._w / 2, self._h / 2])
        p = self.__transform(p)
        return p

    @property
    def p6(self):
        """
        :py:class:`float`: Back-right-top point.
        """
        p = np.array([-self._l / 2, self._w / 2, self._h / 2])
        p = self.__transform(p)
        return p

    @property
    def p7(self):
        """
        :py:class:`float`: Front-right-top point.
        """
        p = np.array([self._l / 2, self._w / 2, self._h / 2])
        p = self.__transform(p)
        return p

    @property
    def p8(self):
        """
        :py:class:`float`: Front-left-top point.
        """
        p = np.array([self._l / 2, -self._w / 2, self._h / 2])
        p = self.__transform(p)
        return p

    @property
    def p(self):
        """
        Attribute to access ndarray of all corners of box in order.
        Returns:
            :py:class:`ndarray` of float: All corners of the bounding box in order.
            The order goes bottom->top and clockwise starting from the bottom-left
            point.
        """
        x = np.vstack([
            self.p1, self.p2, self.p3, self.p4, self.p5, self.p6, self.p7,
            self.p8
        ])
        return x

    def __repr__(self):
        template = (
            "BoundingBox3D(x={cx}, y={cy}, z={cz}), length={l}, width={w}, height={h}, "
            "q=({rw}, {rx}, {ry}, {rz}))")
        return template.format(
            cx=self._cx,
            cy=self._cy,
            cz=self._cz,
            l=self._l,
            w=self._w,
            h=self._h,
            rw=self._q.real,
            rx=self._q.imaginary[0],
            ry=self._q.imaginary[1],
            rz=self._q.imaginary[2],
        )

    def copy(self):
        return deepcopy(self)

    @property
    def triangle_vertices(self):
        """Get triangle vertices to use when plotting a triangular mesh

        Returns:
            np.array: Triangular vertices of the cube
        """
        # Triangle vertex connections (for triangle meshes)
        i = [7, 0, 0, 0, 4, 4, 6, 6, 4, 0, 3, 2]
        j = [3, 4, 1, 2, 5, 6, 5, 2, 0, 1, 6, 3]
        k = [0, 7, 2, 3, 6, 7, 1, 1, 5, 5, 7, 6]
        triangle_vertices = np.vstack([i, j, k])
        return triangle_vertices

    @property
    def edges(self):
        """Get the edge connections for the cube

        Returns:
            np.array: list of edges of the cube
        """
        # Create the edges
        edges = np.array([
            [0, 1],
            [0, 3],
            [0, 4],
            [1, 2],
            [1, 5],
            [2, 3],
            [2, 6],
            [3, 7],
            [4, 5],
            [4, 7],
            [5, 6],
            [6, 7],
        ])
        return edges
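A usage sketch for the factory constructor and the plotting helpers (assuming numpy is imported as above):

box = BoundingBox3D.from_center_dimension_euler(
    center=[1.0, 2.0, 0.5],
    dimension=[4.0, 2.0, 1.5],
    euler_angles=[0.0, 0.0, np.pi / 2],
)
print(box.center)       # [1.  2.  0.5]
print(box.p)            # 8 rotated corners, bottom ring first
print(box.edges.shape)  # (12, 2): index pairs into box.p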
Example 18
def get_groundtruth_scores(constel_files, groundtruth_file):
    """Finds the distances between scenes based on positions and orientations information of frames. Estimates a scene position with distances to objects in it.

    Arguments:
        constel_files {list} -- List of strings representing the name of all the constellation txt files.
        groundtruth_file {string} -- String containing the path to the groundtruth txt file which has timestamps and poses

    Returns:
        2d numpy array -- Array of size nb_frames x nb_frames with the distance between two estimated scene positions.
    """
    # Get the timestamps of all the frames
    timestamps = np.array([
        float(os.path.splitext(os.path.basename(file))[0])
        for file in constel_files
    ])  # str.strip('.txt') would remove characters, not the suffix

    distances_frame = []
    # Get the average z positions (distances) of all the frames
    for constel_file in constel_files:
        z = 0.
        with open(constel_file) as f:
            point_from_objs = [line.split() for line in f]
        for line in point_from_objs:
            z += float(line[-1])
        if len(point_from_objs) > 0:
            z = z / len(point_from_objs)
        distances_frame.append(z)

    # Load all the poses
    poses_non_sync = np.loadtxt(groundtruth_file)

    # Get the poses closest to the timestamps of the frames
    poses_frames = []
    i = 0
    for time in timestamps:
        # Go through the times of the groundtruth data until we get a higher value than the one we have
        while (time > poses_non_sync[i][0]):
            i += 1
        # When we found the limit, choose the limit value or the one before depending on which is closest
        if (poses_non_sync[i][0] - time) > (time - poses_non_sync[i - 1][0]):
            poses_frames.append(poses_non_sync[i - 1][1:])
        else:
            poses_frames.append(poses_non_sync[i][1:])
    poses_frames = np.array(poses_frames)

    # Estimate points in front of the cone based on poses and distances
    points_in_front_cone = np.zeros((len(poses_frames), 3))
    for i in range(len(poses_frames)):
        pose = poses_frames[i]
        vec_front_cam = np.array([0., 0., distances_frame[i]])
        q = Quaternion([pose[-1], pose[3], pose[4], pose[5]])
        v_front_ori = q.rotate(vec_front_cam)
        points_in_front_cone[i] = pose[:3] + v_front_ori

    cones_distances = np.zeros((len(poses_frames), len(poses_frames)))
    for i in range(len(poses_frames)):
        for j in range(i, len(poses_frames)):
            pos1 = points_in_front_cone[i]
            pos2 = points_in_front_cone[j]
            dist = math.sqrt(sum((pos1 - pos2)**2))

            # Filter the values based on manually imposed rules defined in the filter_dist_freiburg3 function
            dist = filter_dist_freiburg3(poses_frames[i], poses_frames[j],
                                         pos1, pos2, dist)

            cones_distances[i, j] = dist
            cones_distances[j, i] = dist

    # Filter ignored neighbors
    for i in range(settings.loop_closure_neighbors_ignored + 1):
        np.fill_diagonal(cones_distances[i:], np.inf)
        np.fill_diagonal(cones_distances[:, i:], np.inf)

    # Put the scores between 0(= high distance) and 1 (= distance of 0)
    gt_scores = 1 - (cones_distances /
                     np.max(cones_distances[cones_distances < 10000]))
    return gt_scores, cones_distances
Example 19
def doublette(whichTrees, plots=True, output=True):
    
    ############################
    depth_limit = 8.
    cam_distance_threshold = 2.
    frame_size = 30

    match_found = 0
    non_match_found = 0
    first_img_found = 0
    ############################

    fx, fy, cx, cy = get_cam_intrinsics()
    drone_cam_translation = np.array([[1., 0., 0., 0.], [0., 1., 0., 0.], [0., 0., 1., 0.46], [0., 0., 0., 1.]])
    print('fx, fy, cx, cy...............', fx, fy, cx, cy)

    rotmat_x = build_rot_mat(-.5*np.pi, 'x')
    rotmat_y = build_rot_mat(-.5*np.pi, 'y')


    while (match_found < 1 and non_match_found < 1):

        print()
        print('finding initial camera position with visible tree..........................')
        print()

        while first_img_found < 1:
        
            '''pick a random image number for snapshot1'''
            if whichTrees == 'random':

                random_tree_kind = np.random.randint(len(trees))
                random_tree_kind_str = trees[random_tree_kind]
                random_tree = np.random.randint(num_tree_folders)
                random_tree_path = DataPath + random_tree_kind_str + '/' + random_tree_kind_str + '_High' + str(random_tree) + '/'

            if whichTrees == 'debug':

                random_tree_kind = 0
                random_tree_kind_str = trees[random_tree_kind]
                random_tree = random_tree_debug
                random_tree_path = DataPath + random_tree_kind_str + '/' + random_tree_kind_str + '_High' + str(random_tree) + '/'

            if whichTrees == 'debug_tejaswi':

                random_tree_kind = 0
                random_tree_kind_str = trees[random_tree_kind]
                random_tree = 1
                random_tree_path = DataPath + random_tree_kind_str + '/' + random_tree_kind_str + '_High' + str(random_tree) + '/'

            if whichTrees == 'Cherry':

                random_tree_kind = 0
                random_tree_kind_str = trees[random_tree_kind]
                random_tree = np.random.randint(num_tree_folders)
                random_tree_path = DataPath + random_tree_kind_str + '/' + random_tree_kind_str + '_High' + str(random_tree) + '/'

            if whichTrees == 'KoreanStewartia':

                random_tree_kind = 1
                random_tree_kind_str = trees[random_tree_kind]
                random_tree = np.random.randint(num_tree_folders)
                random_tree_path = DataPath + random_tree_kind_str + '/' + random_tree_kind_str + '_High' + str(random_tree) + '/'

            '''find total number of frames in respective folder'''
            data_table = np.asarray(pd.read_table(random_tree_path + 'poses.txt', sep=r'\s', header=0, index_col=False, engine='python'))
            num_frames = np.max(data_table[:,0])

            abc = np.squeeze(np.array(np.where(data_table[:,7] == 1)))
            mean_x = np.mean(data_table[abc[0]:abc[-1],1])
            mean_y = np.mean(data_table[abc[0]:abc[-1],2])

            '''find random initial frame'''
            snapshot1_index = np.random.randint(num_frames)

            if whichTrees == 'debug':
                snapshot1_index = snapshot1_index_debug

            '''load respective depth img'''
            file_index1 = file_index(snapshot1_index)

            depthImg1 = convert_pfm(random_tree_path + file_index1 + 'pl.pfm')

            '''delete too far away points'''
            depthImg1[depthImg1 > depth_limit] = 0 

            '''check if there is a tree'''
            if np.array(np.nonzero(depthImg1[frame_size:-frame_size,frame_size:-frame_size])).shape[1] > 10:

                first_img_found = 1

                if output:
                    print('num_frames...............', num_frames)
                    print('random_tree_kind.........', random_tree_kind)
                    print('random_tree_kind_str.....', random_tree_kind_str)
                    print('random_tree..............', random_tree)
                    print('random_tree_path.........', random_tree_path)
                    print('index1...................', snapshot1_index)
                    print(random_tree_path + file_index1 + 'pl.pfm')
                    print()
                    print('=======================================================> initial frame found')
                    print()


        '''load the segmentation'''
        segmentation1 = imageio.imread(random_tree_path + file_index1 + 'seg.ppm')
        segmentation1 = np.round(np.mean(segmentation1, 2) / np.max(np.asarray(np.reshape(np.mean(segmentation1,2), (-1)))), 1)

        '''delete depth image parts without tree'''
        # depthImg1[.7 < segmentation1 and segmentation1 < .9] = 0

        '''choose random point on tree'''
        rand_row, rand_col = 0, 0

        while np.absolute(cy - rand_row) > (cy - frame_size) or np.absolute(cx - rand_col) > (cx - frame_size):
            random_spot1 = np.random.randint(0, np.array(np.nonzero(depthImg1)).shape[1])
            rand_row, rand_col = np.array(np.nonzero(depthImg1))[:,random_spot1]

        if whichTrees == 'debug':
            rand_row, rand_col = rand_row_debug, rand_col_debug      

        '''label point'''
        point_label = segmentation1[rand_row, rand_col]

        '''get 1st cam position and quaternions'''
        cam_position1 = get_cam_position(snapshot1_index, random_tree_path)
        qx1 = cam_position1[3]
        qy1 = cam_position1[4]
        qz1 = cam_position1[5]
        if qz1 == 0.:
            qz1 = 1e-04
        qw1 = cam_position1[6]

        quaternion_cam1 = Quaternion(qw1, qx1, qy1, qz1)

        angle1 = -2 * np.arccos(cam_position1[-1])

        rotmat_z1 = build_rot_mat(angle1, 'z')

        lens1_position = np.dot(rotmat_z1, np.dot(rotmat_x, np.dot(rotmat_y, drone_cam_translation[:3,3])) * np.array([-1.,1., 1.])) + np.transpose(cam_position1[:3])
        x1, y1, z1 = lens1_position[:3]

        
        if output:
            print('rand_row, rand_col.......', rand_row, rand_col)
            print('label 1st point..........', point_label)
            print('x1, y1, z1..........', np.round(x1, 2), np.round(y1, 2), np.round(z1, 2))
            print()
            print('=======================================================> 1st frame fully loaded')
            print()


        '''find 2nd camera position in proximity'''
        cam_distance = 100.
        loop_cam2_search = 0

        while cam_distance > cam_distance_threshold:

            '''choose random and different 2nd point'''
            snapshot2_index = snapshot1_index

            while snapshot2_index == snapshot1_index:
                snapshot2_index = np.random.randint(num_frames)

            if whichTrees == 'debug':
                snapshot2_index = snapshot2_index_debug

            file_index2 = file_index(snapshot2_index)

            '''get 2nd cam position and quaternion'''
            cam_position2 = get_cam_position(snapshot2_index, random_tree_path)
            qx2 = cam_position2[3]
            qy2 = cam_position2[4]
            qz2 = cam_position2[5]
            if qz2 == 0.:
                qz2 = 1e-04
            qw2 = cam_position2[6]

            quaternion_cam2 = Quaternion(qw2, qx2, qy2, qz2)

            angle2 = -2 * np.arccos(cam_position2[-1])

            rotmat_z2 = build_rot_mat(angle2, 'z')

            lens2_position = np.dot(rotmat_z2, np.dot(rotmat_x, np.dot(rotmat_y, drone_cam_translation[:3,3])) * np.array([-1.,1., 1.])) + np.transpose(cam_position2[:3])
            x2, y2, z2 = lens2_position[:3]

            '''check if relative distance under threshold'''
            cam_distance = dist_calc(np.array([x1, y1, z1]), np.array([x2, y2, z2]))


        if output:
            print('snapshot2_index..........', snapshot2_index)
            print('x2, y2, z2...............', np.round(x2, 2), np.round(y2, 2), np.round(z2, 2))
            print('cam_distance.............', np.round(cam_distance, 2))
            print(random_tree_path + file_index2 + 'pl.pfm')
            print()
            print('=======================================================> 2nd frame found')
            print()


        '''Compute transformtation'''
        angle_delta = angle2 - angle1
        translation = np.transpose(lens2_position - lens1_position)

        '''compute sigma1 -> point coordinates'''
        cam1_2_point_distance = depthImg1[rand_row, rand_col]
        d1 = cam1_2_point_distance

        '''translate row, col to cam-coords'''
        u1, v1 = rand_col, 2 * cy - rand_row

        alpha1 = (u1 - cx) * d1 / fx
        beta1 = (v1 - cy) * d1 / fy
        gamma1 = d1

        sigma1 = np.transpose(np.array([alpha1, beta1, gamma1, 1.]))


        sigma1_in_worldcoords = np.dot(rotmat_x, np.dot(rotmat_y, sigma1[:3])) * np.array([-1., 1., 1.])
        # sigma1_in_worldcoords = np.dot(rotmat_x, np.dot(rotmat_y, sigma1[:3])) * np.array([1., 1., -1.]) # delte y rotation

        real_point = np.dot(rotmat_z1, sigma1_in_worldcoords) + np.transpose(lens1_position)

        lens2_2realpoint = real_point - lens2_position

        '''correct up to here!!!'''




########################################################################################################################################################
########################################################################################################################################################
########################################################################################################################################################


        '''should be correct up to this point!!!'''
        ''' sigma_1 = point in camera1 coordinate system: x: horizontal, y: vertical, z: 'depth' '''
        ''' translation = lens2_position - lens1_position '''

        '''rotate pc1 (sigma1) around x by pi/2 '''
        sigma1_x_rotated = np.dot(build_rot_mat(.5*np.pi, 'x'), sigma1[:3])

        '''flip z-axis'''
        sigma1_x_rotated_flipped_z = np.transpose(np.array([sigma1_x_rotated[0], sigma1_x_rotated[1], -1*sigma1_x_rotated[2]]))

        inv_rotation_angle1 = np.linalg.inv(build_rot_mat(angle1, 'z'))

        Pw_notranslation = np.dot(inv_rotation_angle1, sigma1_x_rotated_flipped_z)

        Pw = Pw_notranslation - translation

        rotation_angle2 = build_rot_mat(angle2, 'z')
        PwCam2 = np.dot(rotation_angle2, Pw)

        PwCam2_flipped = np.array([PwCam2[0], PwCam2[1], -1*PwCam2[2]])
        PwCam2_flipped_back_rotated = np.dot(np.linalg.inv(build_rot_mat(.5*np.pi, 'x')), PwCam2_flipped)

        print()
        print('sigma1_x_rotated')
        print(sigma1_x_rotated)
        print()
        print('sigma1_x_rotated_flipped_z')
        print(sigma1_x_rotated_flipped_z)
        print()
        print('inv_rotation_angle1')
        print(inv_rotation_angle1)
        print()
        print('Pw_notranslation')
        print(Pw_notranslation)
        print()
        print('Pw')
        print(Pw)
        print()
        print('PwCam2')
        print(PwCam2)
        print()
        print('PwCam2_flipped_back_rotated')
        print(PwCam2_flipped_back_rotated)




        rotated_translation = np.dot(build_rot_mat(angle2, 'z'), translation)
        print('rotated_translation')
        print(rotated_translation)
        print('quaternion_cam2.rotate(translation)')
        print(quaternion_cam2.rotate(translation))


        # earlier attempt (overridden by the assignment below):
        # sigma2 = np.dot(build_rot_mat(angle2 - angle1, 'y'), sigma1[:3])  # - np.dot(build_rot_mat(angle2, 'z'), translation)

        sigma2 = quaternion_cam1.inverse.rotate(sigma1_in_worldcoords)


        # alpha2 = sigma2[0] + rotated_translation[1]
        # beta2 = sigma2[1] + rotated_translation[2] #always correct
        # gamma2 = sigma2[2] - rotated_translation[0]

        
        alpha2 = PwCam2_flipped_back_rotated[0]
        beta2 = PwCam2_flipped_back_rotated[1]
        gamma2 = PwCam2_flipped_back_rotated[2]

        d2 = gamma2



        u2 = (alpha2 * fx / d2) + cx
        v2 = (beta2 * fy / d2) + cy

        col2 = u2
        row2 = 2 * cy - v2



        sigma22 = np.transpose(np.array([alpha2, beta2, gamma2, 1.]))

        sigma2_in_worldcoords = np.dot(rotmat_x, np.dot(rotmat_y, sigma22[:3])) * np.array([-1., 1., 1.])

        real_point2 = np.dot(rotmat_z2, sigma2_in_worldcoords) + np.transpose(lens2_position)



        ptCam = sigma22[:3]



        # further processing

        # Compute bounding box in pixel coordinates
        bboxRange = np.array([
            [ptCam[0]-voxelGridPatchRadius*voxelSize, ptCam[0]+voxelGridPatchRadius*voxelSize], 
            [ptCam[1]-voxelGridPatchRadius*voxelSize, ptCam[1]+voxelGridPatchRadius*voxelSize],
            [ptCam[2]-voxelGridPatchRadius*voxelSize, ptCam[2]+voxelGridPatchRadius*voxelSize]])
        
        bboxCorners = np.array([
            [bboxRange[0,0],bboxRange[0,0],bboxRange[0,0],bboxRange[0,0],bboxRange[0,1],bboxRange[0,1],bboxRange[0,1],bboxRange[0,1]],
            [bboxRange[1,0],bboxRange[1,0],bboxRange[1,1],bboxRange[1,1],bboxRange[1,0],bboxRange[1,0],bboxRange[1,1],bboxRange[1,1]],
            [bboxRange[2,0],bboxRange[2,1],bboxRange[2,0],bboxRange[2,1],bboxRange[2,0],bboxRange[2,1],bboxRange[2,0],bboxRange[2,1]]])

        bboxRange = np.reshape(bboxRange,(3,2))

        bboxCorners = np.reshape(bboxCorners,(3,8))

        p1_bboxCornersCam = bboxCorners



        if output:
            print('translation..............', np.round(translation, 2))
            print('angle1...................', np.round(angle1, 2))
            print('angle2...................', np.round(angle2, 2))
            print('angle delta..............', np.round(angle_delta, 2))
            print()
            print('u1, v1...................', u1, v1)
            print('sigma1...................', np.round(sigma1, 2))
            print('sigma1_in_worldcoords....', np.round(sigma1_in_worldcoords, 2))
            print('real_point...............', np.round(real_point, 2))
            print()
            print('real_point2..............', np.round(real_point2, 2))
            print()
            print('point dist...............', np.round(dist_calc(real_point, real_point2), 2))
            print()
            print('alpha2, beta2, gamma2....', np.round(np.array([alpha2, beta2, gamma2]), 2))
            print('sigma2...................', np.round(sigma2, 2))
            print('sigma2_in_worldcoords....', np.round(sigma2_in_worldcoords, 2))
            print('lens2_2realpoint.........', np.round(lens2_2realpoint, 2))
            print('u2, v2...................', np.round(u2), np.round(v2))
            print('row2, col2...............', np.round(row2), np.round(col2))
            print()
            print('=======================================================> transformation done')
            print()

        if plots:

            '''load respective depth img'''
            depthImg2 = convert_pfm(random_tree_path + file_index2 + 'pl.pfm')
            depthImg2[depthImg2 > depth_limit] = 0


            real_point_fromcam2 = np.dot(build_rot_mat(angle2, 'z'), sigma2_in_worldcoords) + np.transpose(lens2_position)

            test_mat1 = build_rot_mat(angle1, 'z')
            test_mat2 = build_rot_mat(angle2, 'z')
            # test_mat1 = build_trafo_mat(x1, y1, z1, angle1)
            # test_mat2 = build_trafo_mat(x2, y2, z2, angle2)

            optical_axis1 = np.dot(test_mat1[0:3,0:3],np.transpose(np.array([gamma1, 0., 0.]))) + np.transpose(lens1_position)
            optical_axis2 = np.dot(test_mat2[0:3,0:3],np.transpose(np.array([gamma2, 0., 0.]))) + np.transpose(lens2_position)

            optical_axis1_x = np.linspace(optical_axis1[0], lens1_position[0], 10)
            optical_axis1_y = np.linspace(optical_axis1[1], lens1_position[1], 10)
            optical_axis1_z = np.linspace(optical_axis1[2], lens1_position[2], 10)
            
            optical_axis2_x = np.linspace(optical_axis2[0], lens2_position[0], 10)
            optical_axis2_y = np.linspace(optical_axis2[1], lens2_position[1], 10)
            optical_axis2_z = np.linspace(optical_axis2[2], lens2_position[2], 10)
            
            point2cam1_x = np.linspace(real_point[0], lens1_position[0], 10)
            point2cam1_y = np.linspace(real_point[1], lens1_position[1], 10)
            point2cam1_z = np.linspace(real_point[2], lens1_position[2], 10)
   
            point2cam2_x = np.linspace(real_point[0], lens2_position[0], 10)
            point2cam2_y = np.linspace(real_point[1], lens2_position[1], 10)
            point2cam2_z = np.linspace(real_point[2], lens2_position[2], 10)
   




            plt.figure()
            plt.plot(data_table[:,1], data_table[:,2], c='g', label='drone flight')
            plt.scatter(x1, y1, c='r', label='cam_position1')
            plt.scatter(x2, y2, c='b', label='cam_position2')
            plt.plot(optical_axis1_x, optical_axis1_y, c='k', label='center1')
            plt.plot(point2cam1_x, point2cam1_y, c='r', label='sight1')
            plt.plot(point2cam2_x, point2cam2_y, c='b', label='sight2')
            plt.plot(optical_axis2_x, optical_axis2_y, c='k', label='center2')            
            plt.scatter(real_point[0], real_point[1], c='r', label='real_point1')
            plt.grid()
            plt.legend()


            fig = plt.figure()
            ax = fig.add_subplot(111, projection='3d')

            ax.scatter(x1, y1, z1, c='r', label='cam_position1')
            ax.scatter(x2, y2, z2, c='b', label='cam_position2')

            ax.scatter(bboxCorners[0,:] + real_point[0], bboxCorners[1,:] + real_point[1], bboxCorners[2,:] + real_point[2], c='g', label='corners')

            ax.plot(data_table[:,1], data_table[:,2], data_table[:,3], c='y', label='drone flight')

            # ax.plot(trans_vec_x, trans_vec_y, trans_vec_z, c='m', label='translation')

            ax.plot(optical_axis1_x, optical_axis1_y, optical_axis1_z, c='k', label='center1')
            ax.plot(point2cam1_x, point2cam1_y, point2cam1_z, c='r', label='sight1')
            ax.plot(point2cam2_x, point2cam2_y, point2cam2_z, c='b', label='sight2')

            ax.plot(optical_axis2_x, optical_axis2_y, optical_axis2_z, c='k', label='center2')

            ax.scatter(real_point[0], real_point[1], real_point[2], c='g', label='real_point1')
            ax.scatter(real_point2[0], real_point2[1], real_point2[2], c='b', label='real_point2')
            # ax.scatter(real_point_fromcam2[0], real_point_fromcam2[1], real_point_fromcam2[2], c='g', label='real_point2')

            
            for p in range(-60, 0):
                if p == -10:
                    ax.scatter(mean_x, mean_y, p/10., c='k', label='tree')
                else:
                    ax.scatter(mean_x, mean_y, p/10., c='k')

            ax.legend()

            ax.set_xlabel('X Label')
            ax.set_ylabel('Y Label')
            ax.set_zlabel('Z Label')
            # ax.set_xlim(-10, 10.)
            # ax.set_ylim(-10, 10.)
            # ax.set_zlim(-10., 0.)


            testimage1 = imageio.imread(random_tree_path + file_index1 + '.ppm')
            testimage2 = imageio.imread(random_tree_path + file_index2 + '.ppm')


            plt.figure(figsize=(10,10))

            plt.subplot(221)
            plt.imshow(depthImg1)
            # plt.scatter(rand_col, rand_row, c="r")
            plt.scatter(rand_col, rand_row, 80, 'w', 'x')
            #plt.scatter(np.array(np.nonzero(depthImg1))[1,:],np.array(np.nonzero(depthImg1))[0,:])
            plt.title('considered point:' + str(point_label))
            plt.grid()

            plt.subplot(222)
            plt.imshow(depthImg2)
            plt.title('corresponding view point')
            plt.scatter(col2, row2, 80, 'w', 'x')
            # plt.scatter(u2, v2, 80, 'y', 'x')
            # plt.scatter(pixX, 2 * cy - pixY, 80, 'r', 'x')
            # plt.scatter(pixX, pixY, 80, 'r', 'x')
            plt.grid()

            plt.subplot(223)
            plt.imshow(testimage1)
            plt.title('image 1')
            plt.scatter(rand_col, rand_row, 80, 'r', 'x')
            plt.grid()

            plt.subplot(224)
            plt.imshow(testimage2)
            plt.title('image 2')
            plt.scatter(col2, row2, 80, 'r', 'x')
            # plt.scatter(u2, v2, 80, 'y', 'x')
            # plt.scatter(pixX, 2 * cy - pixY, 80, 'r', 'x')
            # plt.scatter(pixX, pixY, 80, 'r', 'x')
            plt.grid()

            plt.show()

        ################
        match_found = 1
        non_match_found = 1
Example n. 20
initial = [
    -0.0012867963980835793, -0.35302556809186536, 0.4085084665890285,
    -0.0498093375695251, -0.6912765989420319, 0.7208648458793617,
    0.0030931571809757674
]
target = [
    -0.003197180674706364, -0.37576393689660575, 0.45137435818768146,
    -0.0021967960033876664, -0.6684438218043409, 0.7437414872148852,
    -0.005160559496475987
]
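# np.roll moves the scalar w from last to first: [x, y, z, w] -> [w, x, y, z]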
target_q = Quat(np.roll(target[3:], 1))

steps = 300

p2 = np.zeros(3)
p1 = target_q.rotate(np.array(initial[:3]) - target[:3])

traj = get_conical_helix_trajectory(p1, p2, steps, 3.0)

traj = np.apply_along_axis(target_q.rotate, 1, traj)
traj = traj + target[:3]

rospy.init_node('path_node')

path = Path()
h = Header()
h.stamp = rospy.Time.now()
h.frame_id = "base_link"
path.header = h
for t in traj:
    p = PoseStamped()
Example n. 21
    def plotObject(self, name, startTime, y0, color=0, sampleRateMod=4, rotation=False,
                     line=False, checkpoints=0, trim=False, boxsize=5):
        Xsamples = y0[0::self.sampleRate]
        if(trim):
            Xsamples = self.trimPositions(Xsamples,boxsize)
            
        colors = np.zeros((Xsamples.shape[0],3))
        i = 0
        while(i < colors.shape[0]): 
            if(color == 0):
                colors[i,0] = i / colors.shape[0]
                colors[i,1] = 0
                colors[i,2] = 0
            elif(color == 1):
                colors[i,0] = 0
                colors[i,1] = i / colors.shape[0]
                colors[i,2] = 0
            elif(color == 2):
                colors[i,0] = 0
                colors[i,1] = 0
                colors[i,2] = i / colors.shape[0]
            i += 1
        
        #plot the sampled position points with a color gradient
        #self.ax.scatter(Xsamples[:,0], Xsamples[:,1], Xsamples[:,2], 
            #label='X, every {0} ticks'.format(self.sampleRate), color=colors)
        
        if(line):
            self.ax.plot(Xsamples[:,0], Xsamples[:,1], Xsamples[:,2], color=colors[colors.shape[0]-1])
        else:
            self.ax.scatter(Xsamples[:,0], Xsamples[:,1], Xsamples[:,2], color=colors)
        
        if(checkpoints > 0): #pick out points to mark as checkpoints
            csampleRate = int(Xsamples.shape[0] / checkpoints)
            checkPointSamples = Xsamples[0::csampleRate]
            self.ax.scatter(checkPointSamples[:,0], checkPointSamples[:,1], checkPointSamples[:,2], color=(0.,0.,0.))
            
    
        if(rotation):
            Z_line = np.ones((y0.shape[0],3))
            Y_line = np.ones((y0.shape[0],3))
            X_line = np.ones((y0.shape[0],3)) 
            r = y0[:,3:7]
            i = 0
            while(i < y0.shape[0]):
                tq = Quaternion(r[i])
                trZ = tq.rotate(np.array([0.,0.,1.]))
                trY = tq.rotate(np.array([0.,1.,0.]))
                trX = tq.rotate(np.array([1.,0.,0.]))
                Z_line[i] = y0[i,0:3] + trZ
                Y_line[i] = y0[i,0:3] + trY
                X_line[i] = y0[i,0:3] + trX
                i += 1
            
            f = X_line.shape[0]
            objectStartPosition = y0[0,0:3]
            lineStartPosition = X_line[0]
            axisStart = np.vstack((objectStartPosition,lineStartPosition))
            objectEndPosition = y0[y0.shape[0]-1,0:3]
            lineEndPoint = X_line[f-1]
            axisEnd = np.vstack((objectEndPosition,lineEndPoint))

            self.ax.plot(X_line[:,0], X_line[:,1], X_line[:,2], label='X+rot*[1,0,0]   X', color=(1,0,0))
            self.ax.scatter(X_line[0,0], X_line[0,1], X_line[0,2], color='r', marker='o')                  #circle = start
            self.ax.scatter(X_line[f-1:f,0], X_line[f-1:f,1], X_line[f-1:f,2], color='r', marker='^')      #triangle = end
            self.ax.plot(axisStart[:,0],axisStart[:,1],axisStart[:,2], color = (1.,1.,.4)) 
            #line between the first position and the first X,Y,or Z marker
            self.ax.plot(axisEnd[:,0],axisEnd[:,1],axisEnd[:,2], color = 'k') 
            #line between last position and the last X,Y,or Z marker
        
            f = Y_line.shape[0]
            objectStartPosition = y0[0,0:3]
            lineStartPosition = Y_line[0]
            axisStart = np.vstack((objectStartPosition,lineStartPosition))
            objectEndPosition = y0[y0.shape[0]-1,0:3]
            lineEndPoint = Y_line[f-1]
            axisEnd = np.vstack((objectEndPosition,lineEndPoint))
        
            self.ax.plot(Y_line[:,0], Y_line[:,1], Y_line[:,2], label='X+rot*[0,1,0]   Y', color=(0,1,0))
            self.ax.scatter(Y_line[0,0], Y_line[0,1], Y_line[0,2], color='r', marker='o')                  #circle = start
            self.ax.scatter(Y_line[f-1:f,0], Y_line[f-1:f,1], Y_line[f-1:f,2], color='r', marker='^')      #triangle = end
            self.ax.plot(axisStart[:,0],axisStart[:,1],axisStart[:,2], color = (1.,1.,.4)) 
            #line between the first position and the first X,Y,or Z marker
            self.ax.plot(axisEnd[:,0],axisEnd[:,1],axisEnd[:,2], color = 'k') 
            #line between last position and the last X,Y,or Z marker
        
            f = Z_line.shape[0]
            objectStartPosition = y0[0,0:3]
            lineStartPosition = Z_line[0]
            axisStart = np.vstack((objectStartPosition,lineStartPosition))
            objectEndPosition = y0[y0.shape[0]-1,0:3]
            lineEndPoint = Z_line[f-1]
            axisEnd = np.vstack((objectEndPosition,lineEndPoint))

            self.ax.plot(Z_line[:,0], Z_line[:,1], Z_line[:,2], label='X+rot*[0,0,1]   Z', color=(0,0,1))
            self.ax.scatter(Z_line[0,0], Z_line[0,1], Z_line[0,2], color='r', marker='o')                  #circle = start
            self.ax.scatter(Z_line[f-1:f,0], Z_line[f-1:f,1], Z_line[f-1:f,2], color='r', marker='^')      #triangle = end
            self.ax.plot(axisStart[:,0],axisStart[:,1],axisStart[:,2], color = (1.,1.,.4)) 
            #line between the first position and the first X,Y,or Z marker
            self.ax.plot(axisEnd[:,0],axisEnd[:,1],axisEnd[:,2], color = 'k') 
Example n. 22
def doublette(whichTrees):

    ############################
    depth_limit = 8.
    cam_distance_threshold = 3.5
    point_dist_threshold = 1.5
    gamma_tolerance = .2
    frame_size = voxelGridPatchRadius
    max_iter = 1

    plots = True
    output = True

    match_found = 0
    non_match_found = 0
    first_img_found = 0
    ############################

    fx, fy, cx, cy = get_cam_intrinsics()
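    # homogeneous drone-to-camera transform; the 0.46 m entry is the lens offset
    # from the drone body origin, applied along the camera's forward axis below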
    drone_cam_translation = np.array([[1., 0., 0., 0.], [0., 1., 0., 0.],
                                      [0., 0., 1., .46], [0., 0., 0., 1.]])
    # print('fx, fy, cx, cy...............', fx, fy, cx, cy)

    print()
    print(
        'finding initial camera position with visible tree..........................'
    )
    print()

    while match_found < 1:

        gamma_loop = 0
        frustum_loop = 0
        bbox_loop = 0

        first_img_found = 0

        while first_img_found < 1:
            '''pick a random image number for snapshot1'''
            if whichTrees == 'random':

                random_tree_kind = np.random.randint(len(trees))
                random_tree_kind_str = trees[random_tree_kind]
                random_tree = np.random.randint(num_tree_folders)
                random_tree_path = DataPath + random_tree_kind_str + '/' + random_tree_kind_str + '_High' + str(
                    random_tree) + '/'

            if whichTrees == 'debug':

                random_tree_kind = 0
                random_tree_kind_str = trees[random_tree_kind]
                random_tree = random_tree_debug
                random_tree_path = DataPath + random_tree_kind_str + '/' + random_tree_kind_str + '_High' + str(
                    random_tree) + '/'

            if whichTrees == 'debug_tejaswi':

                random_tree_kind = 0
                random_tree_kind_str = trees[random_tree_kind]
                random_tree = 1
                random_tree_path = DataPath + random_tree_kind_str + '/' + random_tree_kind_str + '_High' + str(
                    random_tree) + '/'

            if whichTrees == 'Cherry':

                random_tree_kind = 0
                random_tree_kind_str = trees[random_tree_kind]
                random_tree = np.random.randint(1, num_tree_folders)
                random_tree_path = DataPath + random_tree_kind_str + '/' + random_tree_kind_str + '_High' + str(
                    random_tree) + '/'

            if whichTrees == 'KoreanStewartia':

                random_tree_kind = 1
                random_tree_kind_str = trees[random_tree_kind]
                random_tree = np.random.randint(1, num_tree_folders)
                random_tree_path = DataPath + random_tree_kind_str + '/' + random_tree_kind_str + '_High' + str(
                    random_tree) + '/'
            '''find total number of frames in respective folder'''
            data_table = np.asarray(
                pd.read_table(random_tree_path + 'poses.txt',
                              sep=r'\s',
                              header=0,
                              index_col=False,
                              engine='python'))
            num_frames = np.max(data_table[:, 0])

            abc = np.squeeze(np.array(np.where(data_table[:, 7] == 1)))
            mean_x = np.mean(data_table[abc[0]:abc[-1], 1])
            mean_y = np.mean(data_table[abc[0]:abc[-1], 2])
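            # centre the trajectory on the tree by subtracting these mean x/y values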

            drone_trajectory_z = data_table[:, 3] * -1
            drone_trajectory_x = (data_table[:, 1] - mean_x)
            drone_trajectory_y = (data_table[:, 2] - mean_y)
            drone_trajectory = np.array(
                [drone_trajectory_x, drone_trajectory_y, drone_trajectory_z])
            drone_trajectory_x = drone_trajectory[0, :]
            drone_trajectory_y = drone_trajectory[1, :]
            drone_trajectory_z = drone_trajectory[2, :]
            '''find random initial frame'''
            snapshot1_index = np.random.randint(num_frames)
            if whichTrees == 'debug':
                snapshot1_index = snapshot1_index_debug
            '''load respective depth img'''
            file_index1 = file_index(snapshot1_index)

            depthImg1_raw = convert_pfm(random_tree_path + file_index1 +
                                        'pl.pfm')
            '''delete too far away points'''
            depthImg1 = depthImg1_raw
            depthImg1[depthImg1_raw > depth_limit] = 0
            '''check if there is a tree'''
            if np.array(
                    np.nonzero(
                        depthImg1[frame_size:-frame_size,
                                  frame_size:-frame_size])).shape[1] > 1000:

                first_img_found = 1
                '''choose random point on tree'''
                rand_row, rand_col = 0, 0

                while np.absolute(cy - rand_row) > (
                        cy - frame_size) or np.absolute(cx - rand_col) > (
                            cx - frame_size):
                    random_spot1 = np.random.randint(
                        0,
                        np.array(np.nonzero(depthImg1)).shape[1])
                    rand_row, rand_col = np.array(
                        np.nonzero(depthImg1))[:, random_spot1]

                if whichTrees == 'debug':

                    rand_row, rand_col = rand_row_debug, rand_col_debug
                '''get 1st cam position and quaternions'''
                cam_position1 = get_cam_position(snapshot1_index,
                                                 random_tree_path)
                qx1 = cam_position1[3]
                qy1 = cam_position1[4]
                qz1 = cam_position1[5]
                if qz1 == 0.:
                    qz1 = 1e-04
                qw1 = cam_position1[6]

                quaternion_cam1 = Quaternion(qw1, qx1, qy1, qz1)

                x1, y1, z1 = cam_position1[:3]
                x1 = x1 - mean_x
                y1 = y1 - mean_y
                z1 = -1 * z1
                drone1position = np.transpose(np.array([x1, y1, z1]))
                lens1position = drone1position + quaternion_cam1.rotate(
                    np.array([drone_cam_translation[2, 3], 0, 0]))

        match_loop = 0

        if gamma_loop > max_iter or frustum_loop > max_iter or bbox_loop > max_iter:
            break

        while match_found < 1:

            if gamma_loop > max_iter or frustum_loop > max_iter or bbox_loop > max_iter:
                break
            '''find 2nd camera position in proximity'''
            cam_distance = 100.

            match_loop += 1

            while cam_distance > cam_distance_threshold:
                '''choose random and different 2nd point'''
                snapshot2_index = snapshot1_index
                while snapshot2_index == snapshot1_index:
                    snapshot2_index = np.random.randint(num_frames)
                if whichTrees == 'debug':
                    snapshot2_index = snapshot2_index_debug
                file_index2 = file_index(snapshot2_index)
                '''get 2nd cam position and quaternion'''
                cam_position2 = get_cam_position(snapshot2_index,
                                                 random_tree_path)
                qx2 = cam_position2[3]
                qy2 = cam_position2[4]
                qz2 = cam_position2[5]
                if qz2 == 0.:
                    qz2 = 1e-04
                qw2 = cam_position2[6]

                quaternion_cam2 = Quaternion(qw2, qx2, qy2, qz2)

                x2, y2, z2 = cam_position2[:3]
                x2 = x2 - mean_x
                y2 = y2 - mean_y
                z2 = -1 * z2
                drone2position = np.transpose(np.array([x2, y2, z2]))

                lens2position = drone2position + quaternion_cam2.rotate(
                    np.array([drone_cam_translation[2, 3], 0, 0]))
                '''load respective depth img'''
                depthImg2_raw = convert_pfm(random_tree_path + file_index2 +
                                            'pl.pfm')
                depthImg2 = depthImg2_raw
                depthImg2[depthImg2_raw > depth_limit] = 0

                # '''load the segmentation'''
                # segmentation2 = imageio.imread(random_tree_path + file_index2 + 'seg.ppm')
                # segmentation2 = np.round(np.mean(segmentation2, 2) / np.max(np.asarray(np.reshape(np.mean(segmentation2,2), (-1)))), 1)
                '''check if relative distance under threshold'''
                cam_distance = dist_calc(lens1position, lens2position)

            if gamma_loop > max_iter or frustum_loop > max_iter or bbox_loop > max_iter:
                break
            '''Compute transformation'''
            translation = np.transpose(
                np.array([x1, y1, z1]) - np.array([x2, y2, z2]))
            '''compute sigma1 -> point coordinates'''
            cam1_2_point_distance = depthImg1[rand_row, rand_col]
            d1 = cam1_2_point_distance
            '''translate row, col to cam-coords'''
            u1, v1 = rand_col, 2 * cy - rand_row

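            # back-project the pixel through the pinhole model:
            #   x = (u - cx) * z / fx,  y = (v - cy) * z / fy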
            alpha1 = (u1 - cx) * d1 / fx
            beta1 = (v1 - cy) * d1 / fy
            gamma1 = d1

            sigma1 = np.transpose(np.array([alpha1, beta1, gamma1, 1.]))

            PointRealWorld = drone1position + quaternion_cam1.rotate(
                np.array([gamma1 + .46, -alpha1, beta1]))

            # lens2point1 = PointRealWorld - lens1position
            # lens2point2 = PointRealWorld - lens2position

            lens2point1 = PointRealWorld - drone1position
            lens2point2 = PointRealWorld - drone2position

            lens2point1_vector = np.array(
                [[lens1position[0], PointRealWorld[0]],
                 [lens1position[1], PointRealWorld[1]],
                 [lens1position[2], PointRealWorld[2]]])
            lens2point2_vector = np.array(
                [[lens2position[0], PointRealWorld[0]],
                 [lens2position[1], PointRealWorld[1]],
                 [lens2position[2], PointRealWorld[2]]])

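            # rotate the camera-2-to-point vector into camera 2's body frame;
            # body x is forward (minus the .46 m lens offset), -y gives alpha, z gives beta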
            newSigma = quaternion_cam2.inverse.rotate(lens2point2)

            alpha2 = -1 * newSigma[1]
            beta2 = newSigma[2]
            gamma2 = newSigma[0] - .46

            sigma2 = np.transpose(np.array([alpha2, beta2, gamma2, 1.]))

            PointRealWorld2 = lens2position + quaternion_cam2.rotate(
                np.array([gamma2, -alpha2, beta2]))

            d2 = gamma2

            u2 = (alpha2 * fx / d2) + cx
            v2 = (beta2 * fy / d2) + cy

            if np.abs(alpha2 * fx / d2) + frame_size > cx or np.abs(
                    beta2 * fy / d2) + frame_size > cy:
                print('error2: not in camera frustum')
                frustum_loop += 1
                continue

            # if np.abs(alpha2 * fx / d2) + frame_size < cx and np.abs(beta2 * fy / d2) + frame_size < cy:
            #     match_found = 1

            col2 = int(np.round(u2))
            row2 = int(np.round(2 * cy - v2))

            # point_label2 = segmentation2[row2, col2]

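            # consistency check: the depth rendered at (row2, col2) must agree
            # with the predicted depth gamma2 within gamma_tolerance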
            if np.abs(depthImg2[row2, col2] - gamma2) > gamma_tolerance:
                gamma_loop += 1
                print('error: depth does not match gamma')
                continue

            p1_bboxCornersCam = bbox_corners(sigma1[:3])
            p2_bboxCornersCam = bbox_corners(sigma2[:3])

            p1_bboxCorners_test, p2_bboxCorners_test = [], []
            for j in range(8):
                p1_bboxCorners_test.append(
                    lens1position + quaternion_cam1.rotate(
                        np.array([
                            p1_bboxCornersCam[2, j], -p1_bboxCornersCam[0, j],
                            p1_bboxCornersCam[1, j]
                        ])))
                p2_bboxCorners_test.append(
                    lens2position + quaternion_cam2.rotate(
                        np.array([
                            p2_bboxCornersCam[2, j], -p2_bboxCornersCam[0, j],
                            p2_bboxCornersCam[1, j]
                        ])))

            p1_bboxCorners_test = np.asarray(p1_bboxCorners_test)
            p2_bboxCorners_test = np.asarray(p2_bboxCorners_test)

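            # project the 8 bounding-box corners of each patch into pixel coordinates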
            u_BBox1 = np.round((p1_bboxCornersCam[0, :] * fx /
                                p1_bboxCornersCam[2, :]) + cx)
            v_BBox1 = 2 * cy - np.round(
                (p1_bboxCornersCam[1, :] * fy / p1_bboxCornersCam[2, :]) + cy)

            bboxPixX1 = np.array([
                int(u1 - max(np.abs(u1 - u_BBox1))),
                int(u1 + max(np.abs(u1 - u_BBox1)))
            ])
            # bboxPixY1 = np.array([v1-max(np.abs(v1-v_BBox1)), v1+max(np.abs(v1-v_BBox1))])
            bboxPixY1 = np.array([
                int(rand_row - max(np.abs(rand_row - v_BBox1))),
                int(rand_row + max(np.abs(rand_row - v_BBox1)))
            ])

            u_BBox2 = np.round((p2_bboxCornersCam[0, :] * fx /
                                p2_bboxCornersCam[2, :]) + cx)
            v_BBox2 = 2 * cy - np.round(
                (p2_bboxCornersCam[1, :] * fy / p2_bboxCornersCam[2, :]) + cy)

            bboxPixX2 = np.array([
                int(u2 - max(np.abs(u2 - u_BBox2))),
                int(u2 + max(np.abs(u2 - u_BBox2)))
            ])
            # bboxPixY2 = np.array([v2-max(np.abs(v2-v_BBox2)), v2+max(np.abs(v2-v_BBox2))])
            bboxPixY2 = np.array([
                int(row2 - max(np.abs(row2 - v_BBox2))),
                int(row2 + max(np.abs(row2 - v_BBox2)))
            ])

            if np.any(bboxPixX1 <= 0) or np.any(bboxPixX1 > 2 * cx) or np.any(
                    bboxPixY1 <= 0) or np.any(bboxPixY1 > 2 * cy):
                match_found = 0
                bbox_loop += 1
                print('error1: not in bbox frustum')
                continue

            if np.any(bboxPixX2 <= 0) or np.any(bboxPixX2 > 2 * cx) or np.any(
                    bboxPixY2 <= 0) or np.any(bboxPixY2 > 2 * cy):
                print('error2: not in bbox frustum')
                match_found = 0
                bbox_loop += 1
                continue

            p1_bboxRangePixels = np.array([[bboxPixX1], [bboxPixY1]])
            p2_bboxRangePixels = np.array([[bboxPixX2], [bboxPixY2]])

            match_found = 1

            if gamma_loop > max_iter or frustum_loop > max_iter or bbox_loop > max_iter:
                break

        if gamma_loop > max_iter or frustum_loop > max_iter or bbox_loop > max_iter:
            continue
    '''load the segmentation'''
    segmentation1 = imageio.imread(random_tree_path + file_index1 + 'seg.ppm')
    segmentation1 = np.round(
        np.mean(segmentation1, 2) /
        np.max(np.asarray(np.reshape(np.mean(segmentation1, 2), (-1)))), 1)

    segmentation2 = imageio.imread(random_tree_path + file_index2 + 'seg.ppm')
    segmentation2 = np.round(
        np.mean(segmentation2, 2) /
        np.max(np.asarray(np.reshape(np.mean(segmentation2, 2), (-1)))), 1)
    '''label point'''
    point_label1 = segmentation1[rand_row, rand_col]

    point_label2 = segmentation2[row2, col2]

    p1 = {
        'bboxCornersCam': p1_bboxCornersCam,
        'bboxRangePixels': p1_bboxRangePixels,
        'framePath': random_tree_path + file_index1,
        'pixelCoords': np.array([rand_col, rand_row]),
        'camCoords': sigma1[:3],
        'label': point_label1,
        'depthIm': depthImg1_raw
    }

    p2 = {
        'bboxCornersCam': p2_bboxCornersCam,
        'bboxRangePixels': p2_bboxRangePixels,
        'framePath': random_tree_path + file_index2,
        'pixelCoords': np.array([col2, row2]),
        'camCoords': sigma2[:3],
        'label': point_label2,
        'depthIm': depthImg2_raw
    }

    while non_match_found < 1:

        point2point_dist = 0.

        while point2point_dist < point_dist_threshold:
            '''find random initial frame'''
            snapshot3_index = np.random.randint(num_frames)
            '''load respective depth img'''
            file_index3 = file_index(snapshot3_index)

            depthImg3_raw = convert_pfm(random_tree_path + file_index3 +
                                        'pl.pfm')
            depthImg3 = depthImg3_raw
            '''delete too far away points'''
            depthImg3[depthImg3_raw > depth_limit] = 0
            '''check if there is a tree'''
            if np.array(
                    np.nonzero(
                        depthImg3[frame_size:-frame_size,
                                  frame_size:-frame_size])).shape[1] > 10:

                random_spot3 = np.random.randint(
                    0,
                    np.array(
                        np.nonzero(
                            depthImg3[frame_size:-frame_size,
                                      frame_size:-frame_size])).shape[1])
                row3, col3 = np.array(np.nonzero(depthImg3))[:, random_spot3]
                '''get 1st cam position and quaternions'''
                cam_position3 = get_cam_position(snapshot3_index,
                                                 random_tree_path)
                qx3 = cam_position3[3]
                qy3 = cam_position3[4]
                qz3 = cam_position3[5]
                if qz3 == 0.:
                    qz3 = 1e-04
                qw3 = cam_position3[6]

                quaternion_cam3 = Quaternion(qw3, qx3, qy3, qz3)

                x3, y3, z3 = cam_position3[:3]
                x3 = x3 - mean_x
                y3 = y3 - mean_y
                z3 = -1 * z3
                drone3position = np.transpose(np.array([x3, y3, z3]))
                lens3position = drone3position + quaternion_cam3.rotate(
                    np.array([drone_cam_translation[2, 3], 0, 0]))

                d3 = depthImg3[row3, col3]
                '''translate row, col to cam-coords'''
                u3, v3 = col3, 2 * cy - row3

                alpha3 = (u3 - cx) * d3 / fx
                beta3 = (v3 - cy) * d3 / fy
                gamma3 = d3

                sigma3 = np.transpose(np.array([alpha3, beta3, gamma3, 1.]))

                PointRealWorld3 = drone3position + quaternion_cam3.rotate(
                    np.array([gamma3 + .46, -alpha3, beta3]))

                lens2point3 = PointRealWorld3 - drone3position

                lens2point3_vector = np.array(
                    [[lens3position[0], PointRealWorld3[0]],
                     [lens3position[1], PointRealWorld3[1]],
                     [lens3position[2], PointRealWorld3[2]]])

                point2point_dist = dist_calc(PointRealWorld3, PointRealWorld)

                p3_bboxCornersCam = bbox_corners(sigma3[:3])

                p3_bboxCorners_test = []
                for j in range(8):
                    p3_bboxCorners_test.append(
                        lens3position + quaternion_cam3.rotate(
                            np.array([
                                p3_bboxCornersCam[2, j],
                                -p3_bboxCornersCam[0, j], p3_bboxCornersCam[1,
                                                                            j]
                            ])))

                p3_bboxCorners_test = np.asarray(p3_bboxCorners_test)

                u_BBox3 = np.round((p3_bboxCornersCam[0, :] * fx /
                                    p3_bboxCornersCam[2, :]) + cx)
                v_BBox3 = 2 * cy - np.round((p3_bboxCornersCam[1, :] * fy /
                                             p3_bboxCornersCam[2, :]) + cy)

                bboxPixX3 = np.array([
                    col3 - max(np.abs(col3 - u_BBox3)),
                    col3 + max(np.abs(col3 - u_BBox3))
                ])
                bboxPixY3 = np.array([
                    row3 - max(np.abs(row3 - v_BBox3)),
                    row3 + max(np.abs(row3 - v_BBox3))
                ])

                p3_bboxRangePixels = np.array([[bboxPixX3], [bboxPixY3]])

                lowBoundX = int(np.min(bboxPixX3))
                highBoundX = int(np.max(bboxPixX3)) + 1
                lowBoundY = int(np.min(bboxPixY3))
                highBoundY = int(np.max(bboxPixY3)) + 1

                if lowBoundX < 1 or lowBoundY < 1 or highBoundX > 2 * cx or highBoundY > 2 * cy:
                    match_found = 0
                    print('error: bounds not in frame')
                    continue

                if np.any(bboxPixX3 <= 0) or np.any(
                        bboxPixX3 > 2 * cx) or np.any(
                            bboxPixY3 <= 0) or np.any(bboxPixY3 > 2 * cy):
                    match_found = 0
                    print('error: point 3 not in bbox frustum')
                    continue

                else:
                    non_match_found = 1
                    print('3rd one fully loaded!')
    '''load the segmentation'''
    segmentation3 = imageio.imread(random_tree_path + file_index3 + 'seg.ppm')
    segmentation3 = np.round(
        np.mean(segmentation3, 2) /
        np.max(np.asarray(np.reshape(np.mean(segmentation3, 2), (-1)))), 1)

    p3_bboxCorners_test = []
    for j in range(8):
        p3_bboxCorners_test.append(lens3position + quaternion_cam3.rotate(
            np.array([
                p3_bboxCornersCam[2, j], -p3_bboxCornersCam[0, j],
                p3_bboxCornersCam[1, j]
            ])))
    p3_bboxCorners_test = np.asarray(p3_bboxCorners_test)
    '''label point'''
    point_label3 = segmentation3[row3, col3]
    p3 = {
        'bboxCornersCam': p3_bboxCornersCam,
        'bboxRangePixels': p3_bboxRangePixels,
        'framePath': random_tree_path + file_index3,
        'pixelCoords': np.array([col3, row3]),
        'camCoords': sigma3[:3],
        'label': point_label3,
        'depthIm': depthImg3_raw
    }

    p1_voxelGridTDF = getPatchData(p1, voxelGridPatchRadius, voxelSize,
                                   voxelMargin)
    print()
    print('1st TDF ==========> success!')
    print(p1_voxelGridTDF.ndim)
    # p2_voxelGridTDF = getPatchData(p2,voxelGridPatchRadius,voxelSize,voxelMargin)
    # print()
    # print('2nd TDF ==========> success!')
    # p3_voxelGridTDF = getPatchData(p3,voxelGridPatchRadius,voxelSize,voxelMargin)
    # print()
    # print('3rd TDF ==========> success!')

    if output:
        # print('num_frames...............', num_frames)
        # print('random_tree_kind.........', random_tree_kind)
        # print('random_tree_kind_str.....', random_tree_kind_str)
        # print('random_tree..............', random_tree)
        # print('random_tree_path.........', random_tree_path)
        # print('index1...................', snapshot1_index)
        print(random_tree_path + file_index1 + 'pl.pfm')
        # print('rand_row, rand_col.......', rand_row, rand_col)
        # print('label 1st point..........', point_label1)
        print('lens1position............', np.round(lens1position, 2))
        print()
        print(
            '=======================================================> initial frame found'
        )
        print()
        print(random_tree_path + file_index2 + 'pl.pfm')
        print('lens2position............', np.round(lens2position, 2))
        print('cam_distance.............', np.round(cam_distance, 2))
        print()
        print(
            '================================================> 2nd frame found'
        )
        print()
        print(random_tree_path + file_index1 + 'pl.pfm')
        print()
        print('col1, row1...............', rand_col, rand_row)
        print('col2, row2...............', col2, row2)
        print('col3, row3...............', col3, row3)
        print()
        print('sigma1:', np.round(sigma1[:3], 1))
        print('sigma2:', np.round(sigma2[:3], 1))
        print('sigma3:', np.round(sigma3[:3], 1))
        print()
        print('PointRealWorld1')
        print(np.round(PointRealWorld, 1))
        print('PointRealWorld2')
        print(np.round(PointRealWorld2, 1))
        print('PointRealWorld3')
        print(np.round(PointRealWorld3, 1))
        # print('num_frames...............', num_frames)
        # print('random_tree_kind.........', random_tree_kind)
        # print('random_tree_kind_str.....', random_tree_kind_str)
        # print('random_tree..............', random_tree)
        # print('random_tree_path.........', random_tree_path)
        # print('index1...................', snapshot1_index)
        print()
        print(
            '=======================================================> third frame found'
        )
        print()

    if plots:

        # '''load respective depth img'''
        # depthImg2 = convert_pfm(random_tree_path + file_index2 + 'pl.pfm')
        # depthImg2[depthImg2 > depth_limit] = 0

        print('depth-------', depthImg2[row2, col2])

        fig = plt.figure()
        ax = fig.add_subplot(111, projection='3d')

        ax.scatter(x1, y1, z1, c='b')
        ax.scatter(lens1position[0],
                   lens1position[1],
                   lens1position[2],
                   c='b',
                   label='inital cam')

        ax.scatter(x2, y2, z2, c='g')
        ax.scatter(lens2position[0],
                   lens2position[1],
                   lens2position[2],
                   c='g',
                   label='match cam')

        ax.scatter(x3, y3, z3, c='r')
        ax.scatter(lens3position[0],
                   lens3position[1],
                   lens3position[2],
                   c='r',
                   label='non-match cam')

        ax.scatter(PointRealWorld[0],
                   PointRealWorld[1],
                   PointRealWorld[2],
                   c='b',
                   s=10,
                   label='point1')
        ax.scatter(PointRealWorld2[0],
                   PointRealWorld2[1],
                   PointRealWorld2[2],
                   c='g',
                   s=10,
                   label='point2')
        ax.scatter(PointRealWorld3[0],
                   PointRealWorld3[1],
                   PointRealWorld3[2],
                   c='r',
                   s=10,
                   label='point3')

        ax.scatter(p1_bboxCorners_test[:, 0],
                   p1_bboxCorners_test[:, 1],
                   p1_bboxCorners_test[:, 2],
                   c='b',
                   s=10,
                   label='bbox1')
        ax.scatter(p2_bboxCorners_test[:, 0],
                   p2_bboxCorners_test[:, 1],
                   p2_bboxCorners_test[:, 2],
                   c='g',
                   s=10,
                   label='bbox2')
        ax.scatter(p3_bboxCorners_test[:, 0],
                   p3_bboxCorners_test[:, 1],
                   p3_bboxCorners_test[:, 2],
                   c='r',
                   s=10,
                   label='bbox3')

        ax.plot(lens2point1_vector[0],
                lens2point1_vector[1],
                lens2point1_vector[2],
                c='b',
                label='sight1')
        ax.plot(lens2point2_vector[0],
                lens2point2_vector[1],
                lens2point2_vector[2],
                c='g',
                label='sight2')
        ax.plot(lens2point3_vector[0],
                lens2point3_vector[1],
                lens2point3_vector[2],
                c='r',
                label='sight3')

        ax.plot(drone_trajectory_x,
                drone_trajectory_y,
                drone_trajectory_z,
                c='y',
                label='drone flight')

        ax.plot(np.zeros(2),
                np.zeros(2),
                np.linspace(0, 8, 2),
                c='k',
                label='tree')

        ax.legend()
        ax.set_xlabel('X Label')
        ax.set_ylabel('Y Label')
        ax.set_zlabel('Z Label')
        ax.set_xlim(-10, 10)
        ax.set_ylim(-10, 10)
        ax.set_zlim(0, 10)

        testimage1 = imageio.imread(random_tree_path + file_index1 + '.ppm')

        print('-------------------------------------------')
        print(testimage1.shape)
        print('-------------------------------------------')
        testimage2 = imageio.imread(random_tree_path + file_index2 + '.ppm')
        testimage3 = imageio.imread(random_tree_path + file_index3 + '.ppm')

        plt.figure(figsize=(10, 10))
        plt.subplot(231)
        plt.imshow(depthImg1)
        # plt.scatter(rand_col, rand_row, c="r")
        plt.scatter(rand_col, rand_row, 80, 'w', 'x')
        #plt.scatter(np.array(np.nonzero(depthImg1))[1,:],np.array(np.nonzero(depthImg1))[0,:])
        plt.title('considered point:' + str(point_label1))
        plt.grid()

        plt.subplot(232)
        plt.imshow(depthImg2)
        plt.title('corresponding view point')
        plt.scatter(col2, row2, 80, 'w', 'x')
        # # plt.scatter(u2, v2, 80, 'y', 'x')
        # # plt.scatter(pixX, 2 * cy - pixY, 80, 'r', 'x')
        # # plt.scatter(pixX, pixY, 80, 'r', 'x')
        plt.grid()

        plt.subplot(233)
        plt.imshow(depthImg3)
        plt.title('non-match')
        plt.scatter(col3, row3, 80, 'w', 'x')
        # # plt.scatter(u2, v2, 80, 'y', 'x')
        # # plt.scatter(pixX, 2 * cy - pixY, 80, 'r', 'x')
        # # plt.scatter(pixX, pixY, 80, 'r', 'x')
        plt.grid()

        plt.subplot(234)
        plt.imshow(testimage1)
        plt.title('image 1')
        plt.scatter(rand_col, rand_row, 80, 'r', 'x')
        plt.grid()

        plt.subplot(235)
        plt.imshow(testimage2)
        plt.title('image 2')
        plt.scatter(col2, row2, 80, 'r', 'x')
        # # plt.scatter(u2, v2, 80, 'y', 'x')
        # # plt.scatter(pixX, 2 * cy - pixY, 80, 'r', 'x')
        # # plt.scatter(pixX, pixY, 80, 'r', 'x')
        plt.grid()

        plt.subplot(236)
        plt.imshow(testimage3)
        plt.title('image 3')
        plt.scatter(col3, row3, 80, 'r', 'x')
        # # plt.scatter(u2, v2, 80, 'y', 'x')
        # # plt.scatter(pixX, 2 * cy - pixY, 80, 'r', 'x')
        # # plt.scatter(pixX, pixY, 80, 'r', 'x')
        plt.grid()

        plt.show()

    return p1_voxelGridTDF  #, p2_voxelGridTDF, p3_voxelGridTDF
Example n. 23
class CoordinateSystems:
    def __init__(self):

        #World Coordinate system vectors
        self.ball_world = None #Position, Velocities in world frame
        self.car_world = None #Position, Velocities in world frame
        self.q_car_to_world = None #Unit Quaternion to change car frame to world frame
        self.R_car_to_world = None #Rigid Transformation matrix to change car frame to world frame

        #Car Coordinate system vectors
        self.ball_car = None #Position, Velocities to ball in car coordinates
        self.q_car_to_ball = None #Unit Quaternion pointing to ball from car frame
        self.q_world_to_car = None #Unit Quaternion to change from world frame to car frame
        self.R_world_to_car = None #Rigid Transformation matrix to change world frame to car frame
        self.heading = None

    def update(self, car, ball):
        #Rcar_to_world
        r = -1*car.roll #rotation around roll axis to get car to world frame
        p = -1*car.pitch #rotation around pitch axis to get car to world frame
        y = car.yaw #rotation about the world z axis to get the car to the world frame
        self.Rx = np.matrix([[1, 0, 0], [0, math.cos(r), -1*math.sin(r)], [0, math.sin(r), math.cos(r)]])
        self.Ry = np.matrix([[math.cos(p), 0, math.sin(p)], [0, 1, 0], [-1*math.sin(p), 0, math.cos(p)]])
        self.Rz = np.matrix([[math.cos(y), -1*math.sin(y), 0], [math.sin(y), math.cos(y), 0], [0, 0, 1]])
        #Order of rotations from car to world is z then y then x
        self.Rinter = np.matmul(self.Rz, self.Ry)
        self.Rcar_to_world = np.matmul(self.Rinter, self.Rx)

        #Rworld_to_car
        r = car.roll #rotation around roll axis to get world to car frame
        p = car.pitch #rotation around pitch axis to get world to car frame
        y = -1*car.yaw #rotation about the world z axis to get world to car frame
        self.Rx = np.matrix([[1, 0, 0], [0, math.cos(r), -1*math.sin(r)], [0, math.sin(r), math.cos(r)]])
        self.Ry = np.matrix([[math.cos(p), 0, math.sin(p)], [0, 1, 0], [-1*math.sin(p), 0, math.cos(p)]])
        self.Rz = np.matrix([[math.cos(y), -1*math.sin(y), 0], [math.sin(y), math.cos(y), 0], [0, 0, 1]])
        #Order of rotations from world to car is x then y then z
        self.Rinter = np.matmul(self.Rx, self.Ry)
        self.Rworld_to_car = np.matmul(self.Rinter, self.Rz)

        #Rworld_to_ball
        ux = np.array([1.,0.,0.])
        uy = np.array([0.,1.,0.])
        uz = np.array([0.,0.,1.])
        Pb = np.array([ball.x, ball.y, ball.z])
        Pc = np.array([car.x, car.y, car.z]) #car position (z is not negated here, even though the car z axis points downwards)
        Pbc = np.subtract(Pb, Pc) #Get vector to ball from car, in world coordinates

        xyz = np.cross(ux, Pbc) #xyz of quaternion is rotation between Pbc and unit x vector
        w = math.sqrt((Pbc.item(0) ** 2) + (Pbc.item(1) ** 2) + (Pbc.item(2) ** 2)) + np.dot(ux, Pbc) #scalar part of the quaternion
        qbcworld = Quaternion(w = w, x = xyz.item(0), y = xyz.item(1), z = xyz.item(2))

        #Quaternions for rotations between frames
        self.Qcar_to_world = Quaternion(matrix = self.Rcar_to_world).normalised
        self.Qworld_to_car = Quaternion(matrix = self.Rworld_to_car).normalised
        self.Qworld_to_ball = Quaternion(matrix = qbcworld.rotation_matrix).normalised

        #Random rotation to try to point to
        r = 1
        p = 0
        y = 1

        self.Rx = np.matrix([[1, 0, 0], [0, math.cos(r), -1*math.sin(r)], [0, math.sin(r), math.cos(r)]])
        self.Ry = np.matrix([[math.cos(p), 0, math.sin(p)], [0, 1, 0], [-1*math.sin(p), 0, math.cos(p)]])
        self.Rz = np.matrix([[math.cos(y), -1*math.sin(y), 0], [math.sin(y), math.cos(y), 0], [0, 0, 1]])
        #Order of rotations from world to car is x then y then z
        self.Rinter = np.matmul(self.Rx, self.Ry)
        self.Rworld_to_point = np.matmul(self.Rinter, self.Rz)

        self.Qworld_to_point = Quaternion(matrix = self.Rworld_to_point).normalised
        # print('r', r, 'p', p, 'y', y, 'q', self.Qtocar)

        ux_world = np.array([1.,0.,0.])
        uy_world = np.array([0.,1.,0.])
        uz_world = np.array([0.,0.,1.])
        self.ux_world = np.array([1.,0.,0.])
        self.uy_world = np.array([0.,1.,0.])
        self.uz_world = np.array([0.,0.,1.])
        self.headingx = self.Qworld_to_car.normalised.rotate(ux_world)
        self.headingy = self.Qworld_to_car.normalised.rotate(uy_world)
        self.headingz = self.Qworld_to_car.normalised.rotate(uz_world)
        #get point vectors for car and ball


        Pb_world = np.array([ball.x, ball.y, ball.z]) #ball position in world coordinates (no sign flip applied here)
        Pc_world = np.array([car.x, car.y, car.z])
        self.Pbc_world = np.subtract(Pb_world, Pc_world) #Get vector to ball from car in world coordinate system but origin at car center
        self.Pbc_world_normalised = self.Pbc_world/np.linalg.norm(self.Pbc_world, axis = 0)
        self.Pbc_world_magnitude = np.linalg.norm(self.Pbc_world, axis=0)


        xyz = np.cross(self.headingx, self.Pbc_world/np.linalg.norm(self.Pbc_world, axis=0)) #xyz of quaternion is rotation between the UNIT Pbc vector and normalised x vector
        w = math.sqrt((self.Pbc_world.item(0) ** 2) + (self.Pbc_world.item(1) ** 2) + (self.Pbc_world.item(2) ** 2)) + np.dot(ux_world, self.Pbc_world) #scalar part of the quaternion
        self.qbcworld = Quaternion(w = w, x = xyz.item(0), y = xyz.item(1), z = xyz.item(2))
        # eulerAngles = toEulerAngle(qbcworld.normalised) #convert quaternion to euler angles
        # print("Pbc", self.Pbc)

        #Get quaternion pointing to ball in car coordinates
        self.Pbc_car = self.Qworld_to_car.normalised.rotate(self.Pbc_world)
        self.P0_world = np.array([1,0,0])
        self.P0_car = self.Qworld_to_car.normalised.rotate(self.P0_world)
        self.P1_car = np.array([1,0,0])
        self.P1_world = self.Qcar_to_world.normalised.rotate(self.P1_car)
        vector = self.Qcar_to_world.rotate(self.Pbc_car)
        # print("Pbc_world", self.Pbc_world, 'Pbc_car', self.Pbc_car, 'Pbc_world2', vector, 'r:', car.roll, 'p:', car.pitch, 'y:', car.yaw)

        #Rotational Velocity conversion from world to car coordinates
        wx_world = np.array([car.wx, 0, 0])
        wy_world = np.array([0, car.wy, 0])
        wz_world = np.array([0, 0, car.wz])

        self.wx_car = self.toCarCoordinates(wx_world)
        self.wy_car = self.toCarCoordinates(wy_world)
        self.wz_car = self.toCarCoordinates(wz_world)
        self.w_car = self.wx_car + self.wy_car + self.wz_car

        # print('wx_car:', self.wx_car, 'wy_car', self.wy_car, 'wz_car', self.wz_car)
    def getDesiredQuaternion(self, vector):
        #vector is the 3d point vector to where we want the car to face, in world coordinates
        #Get quaternion that rotates world coordinates to car Coordinates
        xyz = np.cross(self.ux_world, vector/np.linalg.norm(vector, axis=0)) #xyz of quaternion is rotation between the UNIT Pbc vector and normalised x vector
        w = math.sqrt((vector.item(0) ** 2) + (vector.item(1) ** 2) + (vector.item(2) ** 2)) + np.dot(self.ux_world, vector) #scalar part of the quaternion
        desired = Quaternion(w = w, x = xyz.item(0), y = xyz.item(1), z = xyz.item(2))
        return desired
        #Convert vector to Car Coordinates

        #Convert new vector in car coordinates to a quaternion

    def getVectorToBall_world(self):
        vector = self.Qcar_to_world.rotate(self.Pbc_car)

        return vector

    def toWorldCoordinates(self, vector):
        vec = self.Qcar_to_world.rotate(vector)
        return vec
    def toCarCoordinates(self, vector): #vector is the 3d point vector to rotate to car coordinates
        #get world to car quaternion
        vec = self.Qworld_to_car.rotate(vector)
        return vec

    def createQuaternion_world_at_car(self, point):
        #convert point into quaternion
        #Rworld_to_ball
        ux = np.array([1.,0.,0.])
        uy = np.array([0.,1.,0.])
        uz = np.array([0.,0.,1.])
        Ppoint = np.array([point.item(0), point.item(1), point.item(2)])
        #Pcar = np.array([car.item(0), car.item(1), car.item(2)]) #negate z because z axis for car is pointed downwards
        P = Ppoint #np.subtract(Ppoint, Pcar) #Get vector to ball from car in car coordinates

        xyz = np.cross(ux, P) #xyz of quaternion is rotation between Pbc and unit x vector
        w = math.sqrt((P.item(0) ** 2) + (P.item(1) ** 2) + (P.item(2) ** 2)) + np.dot(ux, P) #scalar part of the quaternion
        Qworld_to_point = Quaternion(w = w, x = xyz.item(0), y = xyz.item(1), z = xyz.item(2)).normalised

        #return quaternion
        return Qworld_to_point
Example n. 24
Pcl_ = pcl.PointCloud()
data = np.empty((0, 3), dtype=np.float64)
i = 0
# for the pedestrian dataset /imu/data and /scan
# for bike data set /imu0/data and /scan_corrected
for topic, msg, t in bag.read_messages(
        topics=["/imu/data", "/scan"],
        start_time=rospy.Time(bag.get_start_time()) + rospy.Duration(240)):
    if topic == "/imu/data":
        curr_ori = Quaternion(msg.orientation.w, msg.orientation.x,
                              msg.orientation.y, msg.orientation.z)
        '''acc = curr_ori.conjugate.rotate([msg.linear_acceleration.x, msg.linear_acceleration.y, msg.linear_acceleration.z])
        x.append(acc[0]*dt+x[-1])
        y.append(acc[1]*dt+y[-1])
        z.append((acc[2] - 9.8)*dt+z[-1])'''
        v_z_ = curr_ori.rotate(v_z)
        if np.arccos(v_z_[2]) - math.pi > -.04:
            plot_scan = True
        else:
            plot_scan = False

    else:
        #if plot_scan:

        #data = np.concatenate((data, to_pcl_array(msg, curr_ori)), axis=0)
        data = to_pcl_array(msg, curr_ori)
        '''filter_ = Pcl_.make_statistical_outlier_filter()
        filter_.set_mean_k(50)
        filter_.set_std_dev_mul_thresh(1.0)
        Pcl_ = filter_.filter()'''
Example n. 25
def updateRealtimeVis(quat, idStr, ax):
    if idStr == 'head':
        headPos = quat.rotate([0, 0, 1])
        ind = np.linspace(0, 1, 11)
        x = [headPos[0] * i for i in ind]
        y = [headPos[1] * i for i in ind]
        z = [headPos[2] * i for i in ind]
        ax.plot(x, y, z)
        return quat

    euler = transformations.euler_from_quaternion(
        [quat[1], quat[2], quat[3], quat[0]])
    print(euler)
    if idStr == 'rightArm':
        ans = ANGLE_MAP_R[str(rad2Bucket(euler[2]))][str(rad2Bucket(
            euler[1]))][str(rad2Bucket(euler[0]))]
        if ans['shoulderX'] is not None:
            elbowRelativeEuler = [
                deg2rad(ans['shoulderX']),
                deg2rad(ans['shoulderZ']),
                deg2rad(ans['shoulderY'])
            ]
            elbowRelativeQuat = transformations.quaternion_from_euler(
                elbowRelativeEuler[0], elbowRelativeEuler[1],
                elbowRelativeEuler[2])
            elbowRelativeQuat = Quaternion(elbowRelativeQuat[3],
                                           elbowRelativeQuat[0],
                                           elbowRelativeQuat[1],
                                           elbowRelativeQuat[2])
            elbowPos = elbowRelativeQuat.rotate([0, 0, -1])
            wristRelativePos = quat.rotate([0, 0, -1])
            ind = np.linspace(0, 1, 11)
            x = [0 + elbowPos[0] * i for i in ind
                 ] + [0 + elbowPos[0] + wristRelativePos[0] * i for i in ind]
            y = [-1 + elbowPos[1] * i for i in ind
                 ] + [-1 + elbowPos[1] + wristRelativePos[1] * i for i in ind]
            z = [0 + elbowPos[2] * i for i in ind
                 ] + [0 + elbowPos[2] + wristRelativePos[2] * i for i in ind]
            ax.plot(x, y, z)
            return elbowRelativeQuat

    if idStr == 'leftArm':
        #print(euler)
        ans = ANGLE_MAP_L[str(rad2Bucket(euler[0]))][str(rad2Bucket(
            euler[2]))][str(rad2Bucket(euler[1]))]
        #print(ans)
        if ans['shoulderX'] is not None:
            elbowRelativeEuler = [
                deg2rad(ans['shoulderX']),
                deg2rad(ans['shoulderZ']),
                deg2rad(ans['shoulderY'])
            ]
            elbowRelativeQuat = transformations.quaternion_from_euler(
                elbowRelativeEuler[0], elbowRelativeEuler[1],
                elbowRelativeEuler[2])
            elbowRelativeQuat = Quaternion(elbowRelativeQuat[3],
                                           elbowRelativeQuat[0],
                                           elbowRelativeQuat[1],
                                           elbowRelativeQuat[2])
            elbowPos = elbowRelativeQuat.rotate([0, 0, -1])
            wristRelativePos = quat.rotate([0, 0, -1])
            ind = np.linspace(0, 1, 11)
            x = [0 + elbowPos[0] * i for i in ind
                 ] + [0 + elbowPos[0] + wristRelativePos[0] * i for i in ind]
            y = [1 + elbowPos[1] * i for i in ind
                 ] + [1 + elbowPos[1] + wristRelativePos[1] * i for i in ind]
            z = [0 + elbowPos[2] * i for i in ind
                 ] + [0 + elbowPos[2] + wristRelativePos[2] * i for i in ind]
            ax.plot(x, y, z)
            return elbowRelativeQuat
Example n. 26
# This program demonstrates various properties and applications of quaternions.

import numpy
from pyquaternion import Quaternion

my_quaternion = Quaternion(1, -2, 3, -4)
my_inverse = my_quaternion.inverse

numpy.set_printoptions(suppress=True) # Suppress insignificant values for clarity
u = numpy.array([0, -3/5, 4/5]) # Unit vector
v = numpy.array([0, 3/5, -4/5]) # Unit vector

u_prime = my_quaternion.rotate(u)
u_undo = my_inverse.rotate(u_prime)
v_prime = my_quaternion.rotate(v)
v_undo = my_inverse.rotate(v_prime)


print(v_prime)
print(v_undo)
print(u_prime)
print(u_undo)
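
As a quick check of the round trip above (not part of the original example), rotating by the inverse should recover the input vectors up to floating-point error:

# Numerical sanity check, reusing u, v and the rotated/undone vectors above.
assert numpy.allclose(u_undo, u)
assert numpy.allclose(v_undo, v)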
Example n. 27
                color[0][j] = 255
                color[1][j] = 0
                color[2][j] = 255
            j += 1

        for i in range(number_of_samples):
            motor_pos = values[()]['motor_position'][i]

            j = 0
            for select in args.select:
                # print(motor_pos)

                sensor_positions[i][j] = np.array(ball.config['sensor_pos'][select])
                quat = Quaternion(axis=[1, 0, 0], degrees=-motor_pos)

                sensor_positions[i][j] = quat.rotate(sensor_positions[i][j])
                angle = ball.config['sensor_angle'][select][2]
                sensor_quat = Quaternion(axis=[0, 0, 1], degrees=-angle)
                # sv = np.array([0,0,0])
                sv = values[()]['sensor_values'][i][select]
                sv = sensor_quat.rotate(sv)
                sv = quat.rotate(sv)
                if select >= 14:  # align sensors from other side of the pcb
                    quat2 = Quaternion(axis=[1, 0, 0], degrees=8)
                    sv = quat2.rotate(sv)

                quat2 = Quaternion(axis=[0, 1, 0], degrees=90)
                sv = quat2.rotate(sv)
                sensor_positions[i][j] = quat2.rotate(sensor_positions[i][j])

                sensor_values[i][j] = sv
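
The chain of rotate calls above could equivalently be collapsed into a single quaternion product, since rotations compose by multiplication with the left factor applied last. A minimal standalone check of that identity, with made-up example angles:

import numpy as np
from pyquaternion import Quaternion

q_a = Quaternion(axis=[1, 0, 0], degrees=-30)  # example angles only
q_b = Quaternion(axis=[0, 1, 0], degrees=90)
v = np.array([1.0, 2.0, 3.0])
# (q_b * q_a) applies q_a first, then q_b
assert np.allclose(q_b.rotate(q_a.rotate(v)), (q_b * q_a).rotate(v))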
Example n. 28
from math import pi, sqrt, cos, sin

import numpy as np
from numpy import array
from pyquaternion import Quaternion

# Rotation quaternion for a constant angular velocity w applied over a time
# step dt: q = (cos(|w| dt / 2), sin(|w| dt / 2) * w / |w|).
dt = pi
w_x = 0.5
w_y = 0.5
w_z = 0.5

w_norm = sqrt(w_x**2 + w_y**2 + w_z**2)

alpha = dt * w_norm / 2
q0 = cos(alpha)
q1 = sin(alpha) * w_x / w_norm
q2 = sin(alpha) * w_y / w_norm
q3 = sin(alpha) * w_z / w_norm

rot = Quaternion(q0, q1, q2, q3)
v = array([1, 0, 0])
v_prime = rot.rotate(v)
print(v_prime)


def quaternion_rotation(sample_rate, w_x, w_y, w_z, v_x, v_y, v_z, n):
    # Same construction as above, with dt = 1 / sample_rate; assumes the
    # angular velocity at sample n is non-zero.
    w_norm = np.sqrt(w_x[n]**2 + w_y[n]**2 + w_z[n]**2)
    alpha = w_norm / (sample_rate * 2)
    q0 = np.cos(alpha)
    q1 = np.sin(alpha) * w_x[n] / w_norm
    q2 = np.sin(alpha) * w_y[n] / w_norm
    q3 = np.sin(alpha) * w_z[n] / w_norm

    v = np.array([v_x[n], v_y[n], v_z[n]])
    q = Quaternion(q0, q1, q2, q3)
    v_prime = q.rotate(v)
    return v_prime
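
A hypothetical call, with one made-up sample at 100 Hz (the arrays and sample rate are assumptions, not from the source):

w_x_s = np.array([0.5])
w_y_s = np.array([0.5])
w_z_s = np.array([0.5])
v_x_s = np.array([1.0])
v_y_s = np.array([0.0])
v_z_s = np.array([0.0])
print(quaternion_rotation(100.0, w_x_s, w_y_s, w_z_s, v_x_s, v_y_s, v_z_s, 0))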
Example n. 29
def extract_direction_dataset1(q):
    # q is a quaternion with its scalar part stored last; reorder the
    # components so the scalar part comes first, as pyquaternion
    # expects (w, x, y, z)
    v0 = [1, 0, 0]
    q = Quaternion([q[3], q[2], q[1], q[0]])
    return q.rotate(v0)
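
A quick sanity check (not from the source): an identity rotation stored scalar-last leaves the reference direction unchanged.

print(extract_direction_dataset1([0, 0, 0, 1]))  # -> [1. 0. 0.]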
Example n. 30
    def generate_propagation_parameters(self):
        '''
        ---------------------------------------------
        generate_propagation_parameters()
        ---------------------------------------------
        
        This method finds the parameters to pass to the OpenCL kernel to
        correctly define the sampling grid.
        '''
        # Find the unit normal vector to the plane.

        unit_normal = (1 /
                       np.linalg.norm(self.normal_vector)) * self.normal_vector

        # Define the quaternion required to rotate the parallel vector by an angle of pi/2.

        quaternion = Quaternion(
            axis=[unit_normal[0], unit_normal[1], unit_normal[2]],
            angle=tau / 4)

        # Rotate the parallel vector to get a new vector which is perpendicular to the normal and the parallel vector.

        self.vector_2 = quaternion.rotate(self.parallel_vector)

        # Find the norm of these vectors.

        parallel_vector_norm = np.linalg.norm(self.parallel_vector)
        vector_2_norm = np.linalg.norm(self.vector_2)

        # Find the unit vectors.

        unit_parallel_vector = (1 /
                                parallel_vector_norm) * self.parallel_vector

        unit_vector_2 = (1 / vector_2_norm) * self.vector_2

        # Compute the angle between these vectors to check that it is equal to pi/2.

        angle_1 = int(
            np.round(
                np.rad2deg(
                    np.arccos(np.dot(unit_parallel_vector, unit_vector_2))),
                1))

        # Compute the angle between each of these vectors and the normal to ensure
        # that they parameterise the plane perpendicular to the normal.

        normal_vector_norm = np.linalg.norm(self.normal_vector)

        angle_2 = int(
            np.round(
                np.rad2deg(
                    np.arccos(
                        np.dot(self.parallel_vector, self.normal_vector) /
                        (parallel_vector_norm * normal_vector_norm))), 1))
        angle_3 = int(
            np.round(
                np.rad2deg(
                    np.arccos(
                        np.dot(self.vector_2, self.normal_vector) /
                        (vector_2_norm * normal_vector_norm))), 1))

        # Throw error if they are not.

        if not (angle_1 == 90 and angle_2 == 90 and angle_3 == 90):

            raise Exception(
                'Could not parameterise the requested sampling grid correctly!'
            )

        self.x_lim = np.float32(self.N_x / 2)
        self.y_lim = np.float32(self.N_y / 2)
        self.vx1 = np.float32(self.parallel_vector[0])
        self.vy1 = np.float32(self.parallel_vector[1])
        self.vz1 = np.float32(self.parallel_vector[2])
        self.x0 = np.float32(self.origin[0])
        self.y0 = np.float32(self.origin[1])
        self.z0 = np.float32(self.origin[2])
        self.vx2 = np.float32(np.round(self.vector_2[0], 4))
        self.vy2 = np.float32(np.round(self.vector_2[1], 4))
        self.vz2 = np.float32(np.round(self.vector_2[2], 4))
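
The core of the method above is a single quaternion rotation of the in-plane vector about the unit normal by a quarter turn. A minimal standalone sketch of that step, using assumed example vectors rather than the class attributes:

import numpy as np
from math import tau
from pyquaternion import Quaternion

# Assumed example inputs, for illustration only.
normal_vector = np.array([0.0, 0.0, 2.0])
parallel_vector = np.array([1.0, 0.0, 0.0])

unit_normal = normal_vector / np.linalg.norm(normal_vector)
q = Quaternion(axis=unit_normal, angle=tau / 4)
vector_2 = q.rotate(parallel_vector)
print(vector_2)  # approximately [0, 1, 0]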