Example #1
    def __init__(self, scene):
        self.scene = scene
        self.dynamics = 1

        # dynamics parameters
        self.l = 0.331

        # state
        self.xi = State(0, 0, 0, self)
        self.xid = State(3, 0, 0, self)
        self.xid0 = State(3, 0, math.pi / 4, self)
        self.reachedGoal = False
        # Control parameters
        self.kRho = 1
        self.kAlpha = 6
        self.kPhi = -1
        self.kV = 3.8
        self.gamma = 0.15

        # Point cloud built from the on-board sensors
        self.pointCloud = PointCloud(self)

        # Data to be recorded
        self.recordData = False
        self.data = Data(self)
        self.v1Desired = 0
        self.v2Desired = 0

        self.role = None
        self.neighbors = []
        self.leader = None  # Only for data recording purposes

        self.ctrl1_sm = []
        self.ctrl2_sm = []
Example #2
    def run(self) -> None:
        cameraConnected, allDepth, allAmp, depthValues, amplitudeValues = self.sensor.getFrame(
            WaveLengthIDs['830'])

        if (not cameraConnected):
            return
        self.pointCloud = PointCloud(allDepth, HORIZONTAL_FOV_IN_DEG,
                                     VERTICAL_FOV_IN_DEG)
        self.__saveToFile(self.pointCloud.toPCD())
Example #3
def trackBoundingBox():
	json_request = request.get_json()
	pointcloud = PointCloud.parse_json(json_request["pointcloud"], json_request["intensities"])
	filtered_indices = tracker.filter_pointcloud(pointcloud)
	next_bounding_boxes = tracker.predict_bounding_boxes(pointcloud)
	print(next_bounding_boxes)
	return str([filtered_indices, next_bounding_boxes])
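Example #3 (and Example #18 further down) reads like a Flask view function, but the snippet omits the application object, the route decorator, and the module-level `tracker` it relies on. Below is a minimal sketch of that missing wiring, assuming the project uses Flask; the route path, module names, and port are guesses, not taken from the original code.

from flask import Flask, request

from pointcloud import PointCloud  # assumed project module
from tracker import Tracker        # assumed project module

app = Flask(__name__)
tracker = None  # set once by an init endpoint such as the one in Example #18


@app.route("/track_bounding_box", methods=["POST"])  # route path is hypothetical
def trackBoundingBox():
    json_request = request.get_json()
    pointcloud = PointCloud.parse_json(json_request["pointcloud"],
                                       json_request["intensities"])
    filtered_indices = tracker.filter_pointcloud(pointcloud)
    next_bounding_boxes = tracker.predict_bounding_boxes(pointcloud)
    return str([filtered_indices, next_bounding_boxes])


if __name__ == "__main__":
    app.run(port=5000)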
Example #4
class main:
    def __init__(self):
        self.sensor = Sensor()
        self.pointCloud: PointCloud = None
        while True:
            self.run()
            if input("press enter to take another snapshot, -1 to exit...") == "-1":
                return

    def run(self) -> None:
        cameraConnected, allDepth, allAmp, depthValues, amplitudeValues = self.sensor.getFrame(
            WaveLengthIDs['830'])

        if (not cameraConnected):
            return
        self.pointCloud = PointCloud(allDepth, HORIZONTAL_FOV_IN_DEG,
                                     VERTICAL_FOV_IN_DEG)
        self.__saveToFile(self.pointCloud.toPCD())

    def __saveToFile(self, s: str):
        filename = f'./output/snapshot_{"{:%m_%d_%H_%M_%S}".format(datetime.now())}.pcd'
        with open(filename, 'w') as f:
            f.write(s)
            print(f'saved to: {filename}')
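Examples #2 and #4 write `self.pointCloud.toPCD()` straight to a `.pcd` file, but the snippets never show what `toPCD` produces. Assuming it emits the standard ASCII PCD format, a minimal serializer for an (N, 3) array of XYZ points could look like the sketch below; the function name and field layout are illustrative, not taken from the original `PointCloud` class.

import numpy as np


def points_to_pcd(points: np.ndarray) -> str:
    """Serialize an (N, 3) array of XYZ points into an ASCII .pcd string."""
    n = points.shape[0]
    header = "\n".join([
        "# .PCD v0.7 - Point Cloud Data file format",
        "VERSION 0.7",
        "FIELDS x y z",
        "SIZE 4 4 4",
        "TYPE F F F",
        "COUNT 1 1 1",
        f"WIDTH {n}",
        "HEIGHT 1",
        "VIEWPOINT 0 0 0 1 0 0 0",
        f"POINTS {n}",
        "DATA ascii",
    ])
    body = "\n".join(f"{x:.6f} {y:.6f} {z:.6f}" for x, y, z in points)
    return header + "\n" + body + "\n"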
Example #5
    def __init__(self, src, dst, plot=True, reinit=False):
        """
        Initialize ICPBase class object

        :param src: source PointCloud object
        :param dst: destination PointCloud object
        :param plot: visualize the registration or not
        :param reinit: re-initialize the current PointCloud when a local minimum is encountered, or not
        """
        self.src = src
        self.dst = dst
        assert self.src.num == self.dst.num
        self.result = PointCloud(src)
        self.plot = plot
        self.reinit = reinit
        if self.plot: self.plotter = PointCloudPlotter(self.dst)
Example #6
    def computeCorrespondence(self):
        """
        Compute point correspondence from result PointCloud to dst.
        The CUDA kernel and a summation reduction are called here.

        :return: total distance and matrix with point correspondence
        """
        super(ICPParallel, self).computeCorrespondence()

        target = np.zeros([self.src.num, 3], dtype=np.float32)
        self.computeCorrespondenceCuda(cuda.In(self.result.points),
                                       cuda.Out(target),
                                       self.distances_gpu,
                                       block=(self.numCore, 1, 1))

        return gpuarray.sum(self.distances_gpu).get(), PointCloud(target)
Example #7
    def computeCorrespondence(self):
        """
        Compute point correspondence from source to destination PointCloud.
        Numpy is extensively used for computing and comparing distances.

        :return: total distance and a PointCloud of the matched target points
        """
        super(ICP, self).computeCorrespondence()

        indexArray = np.zeros(self.result.num, dtype=int)  # np.int is removed in recent NumPy
        totalDistance = 0.0
        for resultIndex, resultValue in enumerate(self.result.points):
            minIndex, minDistance = -1, np.inf
            for dstIndex, dstValue in enumerate(self.dst.points):
                distance = np.linalg.norm(resultValue - dstValue)
                if distance < minDistance:
                    minDistance, minIndex = distance, dstIndex
            indexArray[resultIndex] = minIndex
            totalDistance += minDistance

        return totalDistance, PointCloud(self.dst.points[indexArray, 0:3])
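The double loop in Example #7 evaluates every result/dst pair in Python, which is O(N_result x N_dst) with interpreter overhead on each distance. When both clouds fit in memory, the same nearest-neighbor search can be done with NumPy broadcasting; the sketch below assumes `points` are (N, 3) arrays as in the original and returns indices rather than a new PointCloud. For very large clouds, a k-d tree such as scipy.spatial.cKDTree avoids materializing the full distance matrix.

import numpy as np


def compute_correspondence_vectorized(result_points, dst_points):
    """Nearest dst point for every result point: returns (total_distance, index_array)."""
    # (N_result, N_dst) matrix of pairwise Euclidean distances via broadcasting
    distances = np.linalg.norm(result_points[:, None, :] - dst_points[None, :, :], axis=2)
    index_array = np.argmin(distances, axis=1)
    total_distance = distances[np.arange(len(result_points)), index_array].sum()
    return total_distance, index_array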
Example #8
class ICPBase(object):
    """
    Base class for traditional ICP and parallel ICP

    - Sketches the ICP algorithm skeleton
    - Defines common attributes and functions
    """
    def __init__(self, src, dst, plot=True, reinit=False):
        """
        Initialize ICPBase class object

        :param src: source PointCloud object
        :param dst: destination PointCloud object
        :param plot: visualize the registration or not
        :param reinit: re-initialize the current PointCloud when a local minimum is encountered, or not
        """
        self.src = src
        self.dst = dst
        assert self.src.num == self.dst.num
        self.result = PointCloud(src)
        self.plot = plot
        self.reinit = reinit
        if self.plot: self.plotter = PointCloudPlotter(self.dst)



    def getPointCloudRegistration(self, target):
        """
        Compute the PointCloud registration with correspondence.
        It is the same for both ICP and ICPParallel.

        :param target: the target PointCloud to register with
        :return: R: Rotation between PointCloud result and target
                T: Translation between PointCloud result and target
        """
        assert self.result and target
        assert self.result.num == target.num

        H = np.dot(self.result.normPoints.T, target.normPoints)
        U, S, Vt = np.linalg.svd(H)
        R = np.dot(Vt.T, U.T)
        # Consider reflection case
        if np.linalg.det(R) < 0:
            Vt[2,:] *= -1
            R = np.dot(Vt.T, U.T)
        # Warn if the SVD did not yield a proper rotation
        if abs(np.linalg.det(R) - 1.0) > 0.0001:
            warnings.warn("Direct Point Cloud registration unstable!")  # assumes `import warnings` at module level
        T = target.center - self.result.center.dot(R.T)
        return R, T


    def computeCorrespondence(self):
        """
        Base function for finding correspondence.
        Assertion only. Detailed implementation varies.

        :return: total distance and updated PointCloud result
        """
        assert self.result and self.dst
        assert self.result.num == self.dst.num



    def solve(self):
        """
        Algorithmic skeleton of ICP.
        It is common to both ICP and ICPParallel.
        See the PDF report or Besl's paper for more details.

        :return: None
        """
        print('Solve ICP with', self.__class__.__name__)
        distanceThres, maxIteration, iteration = 0.001, 20, 0
        # Perform the initial computation of correspondence
        currentDistance, target = self.computeCorrespondence()
        print "Init ICP, distance: %f" % currentDistance
        # ICP loop
        while currentDistance > distanceThres and iteration < maxIteration:
            if self.plot: self.plotter.plotData(self.result)
            # Compute the transformation between self.result and target
            R, T = self.getPointCloudRegistration(target)

            # Apply the transformation to self.result
            self.result.applyTransformation(R, T)

            # DEBUG
            if self.reinit:
                if np.amax(np.abs(R - np.identity(3))) < 0.000001 and np.amax(np.abs(T)) < 0.000001:
                    self.result.applyTransformation(getRandomRotation(), getRandomTranslation())

            # Compute point correspondence
            currentDistance, target = self.computeCorrespondence()

            # Update
            iteration += 1
            print "Iteration: %5d, with total distance: %f" % (iteration, currentDistance)
Example #9
    def snapPointCloud(self):
        depthValues = self.__getDepth()  # TODO: not considering freshness
        self.pointCloud = PointCloud(depthValues, HORIZONTAL_FOV_IN_DEG, 32.0,
                                     VERTICAL_FOV_IN_DEG)
        return True
Example #10
    from optparse import OptionParser
    parser = OptionParser()
    parser.add_option('-n',
                      '--number',
                      dest='pointNum',
                      help='number of points in the point cloud',
                      type='int',
                      default=256)
    parser.add_option('-p',
                      '--plot',
                      dest='plot',
                      action='store_true',
                      help='visualize icp result',
                      default=False)
    parser.add_option('-c',
                      '--core',
                      dest='coreNum',
                      help='number of threads used in the cuda',
                      type='int',
                      default=0)
    (options, args) = parser.parse_args()
    pc1 = PointCloud()
    pc1.initFromRand(options.pointNum, 0, 1, 10, 20, 0, 1)
    pc2 = PointCloud(pc1)
    pc2.applyTransformation()
    icpSolver = ICPParallel(pc1,
                            pc2,
                            numCore=options.coreNum,
                            plot=options.plot)
    icpSolver.solve()
Example #11
    def predict_bounding_boxes(self,
                               pointcloud,
                               set_next_bounding_boxes=False,
                               bounding_factor=.5,
                               eps=0.1):
        next_bounding_boxes = []
        for bounding_box in self.bounding_boxes:
            filtered_pointcloud = pointcloud.filter_points()
            filtered_pointcloud_indices = bounding_box.filter_points(
                filtered_pointcloud, bounding_factor=bounding_factor)
            filtered_points = filtered_pointcloud.points[
                filtered_pointcloud_indices, :]
            x, y = filtered_points[:, 0], filtered_points[:, 1]
            z = filtered_pointcloud.intensities[filtered_pointcloud_indices]

            # fig = plt.figure()
            # ax = fig.add_subplot(111, projection='3d')
            # ax.scatter(x, y, z)
            # plt.show()

            sorted_x, sorted_y = np.sort(x), np.sort(y)

            resolution = max(
                eps,
                min(np.min(np.ediff1d(sorted_x)),
                    np.min(np.ediff1d(sorted_y))))
            h, w = int((np.max(x) - np.min(x)) // resolution) + 1, int(
                (np.max(y) - np.min(y)) // resolution) + 1
            print(h, w, resolution)

            im = -np.ones((h, w)) * 5e-2
            quantized_x = ((filtered_points[:, 0] - np.min(x)) //
                           resolution).astype(int)
            quantized_y = ((filtered_points[:, 1] - np.min(y)) //
                           resolution).astype(int)
            im[quantized_x, quantized_y] = 1

            mask_h = int(bounding_box.width // resolution) + 1
            mask_w = int(bounding_box.length // resolution) + 1

            mask = np.ones((mask_h, mask_w))

            # plt.scatter(x, y)
            # plt.show()

            print("mask shape: ", mask.shape)
            cc = signal.correlate2d(im, mask, mode="same")
            center = (np.array([np.argmax(cc) // w,
                                np.argmax(cc) % w]) * resolution +
                      np.array([np.min(x), np.min(y)]))
            upper_right = center + np.array(
                [bounding_box.width / 2, bounding_box.length / 2])
            lower_left = center - np.array(
                [bounding_box.width / 2, bounding_box.length / 2])
            theta = bounding_box.angle
            box_pointcloud = PointCloud(np.vstack((upper_right, lower_left)))
            corners = box_pointcloud.rigid_transform(theta, center) + center
            next_bounding_boxes.append([corners.tolist(), theta])
            print(
                np.argmax(cc) // w,
                np.argmax(cc) % w, np.argmax(cc), np.max(cc),
                cc[np.argmax(cc) // w, np.argmax(cc) % w])
            # plt.subplot(1,2,1)
            # plt.imshow(im, cmap='Greys',  interpolation='nearest')
            # plt.subplot(1,2,2)
            # plt.imshow(cc, cmap='Greys',  interpolation='nearest')

            # plt.show()
        return next_bounding_boxes
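Example #11 predicts where a bounding box moved by rasterizing the filtered points into an occupancy image and sliding a box-sized mask of ones over it with `scipy.signal.correlate2d`; the peak of the correlation marks the most point-dense placement of the box, which is then converted back to metric coordinates. A stripped-down sketch of just that peak-finding step on a binary grid (the rasterization and coordinate bookkeeping of the original are omitted):

import numpy as np
from scipy import signal


def densest_window(occupancy, box_h, box_w):
    """Grid cell (row, col) where a box_h x box_w window covers the most occupied cells."""
    cc = signal.correlate2d(occupancy, np.ones((box_h, box_w)), mode="same")
    flat = np.argmax(cc)
    return flat // occupancy.shape[1], flat % occupancy.shape[1]


# A 3x3 block of occupied cells inside a 10x10 grid
grid = np.zeros((10, 10))
grid[4:7, 5:8] = 1
print(densest_window(grid, 3, 3))  # (5, 6): the center of the occupied block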
Example #12
from pathlib import Path

import numpy as np

from voxelcloud import VoxelCloud
from componentcloud import ComponentCloud
# read_ply, plot and PointCloud come from the project's own modules (imports not shown in the original snippet)

# Parameters
name = "bildstein5_extract.ply"
ply_file = Path("..") / "data" / "relabeled_clouds" / name

c_D = 0.25
segment_out_ground = True

data = read_ply(ply_file)

# Creating point cloud
pc = PointCloud(points=np.vstack((data['x'], data['y'], data['z'])).T,
                laser_intensity=data['reflectance'],
                rgb_colors=np.vstack(
                    (data['red'], data['green'], data['blue'])).T,
                label=data['label'])

# Creating voxel cloud
vc = VoxelCloud(pointcloud=pc,
                max_voxel_size=0.3,
                min_voxel_length=4,
                threshold_grow=1.5,
                method="regular")
vc.remove_poorly_connected_voxels(c_D=c_D, threshold_nb_voxels=10)

# Plotting for inspection
c = vc.features['mean_color']
plot(vc, colors=c, also_unassociated_points=True, only_voxel_center=False)
Example #13
class Robot():
    def __init__(self, scene):
        self.scene = scene
        self.dynamics = 1

        # dynamics parameters
        self.l = 0.331

        # state
        self.xi = State(0, 0, 0, self)
        self.xid = State(3, 0, 0, self)
        self.xid0 = State(3, 0, math.pi / 4, self)
        self.reachedGoal = False
        # Control parameters
        self.kRho = 1
        self.kAlpha = 6
        self.kPhi = -1
        self.kV = 3.8
        self.gamma = 0.15

        # Point cloud built from the on-board sensors
        self.pointCloud = PointCloud(self)

        # Data to be recorded
        self.recordData = False
        self.data = Data(self)
        self.v1Desired = 0
        self.v2Desired = 0

        self.role = None
        self.neighbors = []
        self.leader = None  # Only for data recording purposes

        self.ctrl1_sm = []
        self.ctrl2_sm = []

    def propagateDesired(self):
        if self.dynamics == 5:
            pass
        elif self.dynamics == 4 or self.dynamics == 11:
            # Circular desired trajectory, deprecated.
            t = self.scene.t
            radius = 2
            omega = 0.2
            theta0 = math.atan2(self.xid0.y, self.xid0.x)
            rho0 = (self.xid0.x**2 + self.xid0.y**2)**0.5
            self.xid.x = (radius * math.cos(omega * t) +
                          rho0 * math.cos(omega * t + theta0))
            self.xid.y = (radius * math.sin(omega * t) +
                          rho0 * math.sin(omega * t + theta0))
            self.xid.vx = -(radius * omega * math.sin(omega * t) +
                            rho0 * omega * math.sin(omega * t + theta0))
            self.xid.vy = (radius * omega * math.cos(omega * t) +
                           rho0 * omega * math.cos(omega * t + theta0))
            self.xid.theta = math.atan2(self.xid.vy, self.xid.vx)
            #self.xid.omega = omega

            c = self.l / 2
            self.xid.vxp = self.xid.vx - c * math.sin(self.xid.theta) * omega
            self.xid.vyp = self.xid.vy + c * math.cos(self.xid.theta) * omega
        elif self.dynamics == 16:
            # Linear desired trajectory
            t = self.scene.t
            #dt = self.scene.dt
            x = self.scene.xid.x
            y = self.scene.xid.y
            #print('x = ', x, 'y = ', y)
            theta = self.scene.xid.theta
            #print('theta = ', theta)
            sDot = self.scene.xid.sDot
            thetaDot = self.scene.xid.thetaDot

            phii = math.atan2(self.xid0.y, self.xid0.x)
            rhoi = (self.xid0.x**2 + self.xid0.y**2)**0.5
            #print('phii = ', phii)
            self.xid.x = x + rhoi * math.cos(phii)
            self.xid.y = y + rhoi * math.sin(phii)
            self.xid.vx = sDot * math.cos(theta)
            self.xid.vy = sDot * math.sin(theta)
            #print('vx: ', self.xid.vx, 'vy:', self.xid.vy)
            #print('v', self.index, ' = ', (self.xid.vx**2 + self.xid.vy**2)**0.5)

            dpbarx = -self.scene.xid.dpbarx
            dpbary = -self.scene.xid.dpbary
            if (dpbarx**2 + dpbary**2)**0.5 > 1e-1:
                self.xid.theta = math.atan2(dpbary, dpbarx)
            #if (self.xid.vx**2 + self.xid.vy**2)**0.5 > 1e-3:
            #    self.xid.theta = math.atan2(self.xid.vy, self.xid.vx)
            #self.xid.omega = omega

            c = self.l / 2
            self.xid.vxp = self.xid.vx - c * math.sin(
                self.xid.theta) * thetaDot
            self.xid.vyp = self.xid.vy + c * math.cos(
                self.xid.theta) * thetaDot
            self.xid.vRef = self.scene.xid.vRef
        elif self.dynamics == 17 or self.dynamics == 18:
            self.xid.theta = self.scene.xid.vRefAng

    def precompute(self):
        self.xi.transform()
        self.xid.transform()
        self.updateNeighbors()

    def propagate(self):
        if self.scene.vrepConnected == False:
            self.xi.propagate(self.control)
        else:
            omega1, omega2 = self.control()
            vrep.simxSetJointTargetVelocity(self.scene.clientID,
                                            self.motorLeftHandle, omega1,
                                            vrep.simx_opmode_oneshot)
            vrep.simxSetJointTargetVelocity(self.scene.clientID,
                                            self.motorRightHandle, omega2,
                                            vrep.simx_opmode_oneshot)

    def updateNeighbors(self):
        self.neighbors = []
        self.leader = None
        for j in range(len(self.scene.robots)):
            if self.scene.adjMatrix[self.index, j] == 0:
                continue
            robot = self.scene.robots[j]  # neighbor
            self.neighbors.append(robot)
            if robot.role == self.scene.ROLE_LEADER:
                if self.leader is not None:
                    raise Exception(
                        'There cannot be more than one leader in a scene!')
                self.leader = robot

    def control(self):
        if self.learnedController is not None:
            mode = self.learnedController()
            observation, action_1 = self.data.getObservation(mode)
            if observation is None:
                action = np.array([[0, 0]])
            else:
                action = self.learnedController(observation, action_1)
            #action = np.array([[0, 0]])
            v1 = action[0, 0]
            v2 = action[0, 1]

            self.ctrl1_sm.append(v1)
            self.ctrl2_sm.append(v2)
            if len(self.ctrl1_sm) < 10:
                v1 = sum(self.ctrl1_sm) / len(self.ctrl1_sm)
                v2 = sum(self.ctrl2_sm) / len(self.ctrl2_sm)
            else:
                v1 = sum(self.ctrl1_sm[len(self.ctrl1_sm) -
                                       10:len(self.ctrl1_sm)]) / 10
                v2 = sum(self.ctrl2_sm[len(self.ctrl2_sm) -
                                       10:len(self.ctrl2_sm)]) / 10

            #print(v1,v2,'dnn')

        elif self.dynamics == 5:
            K3 = 0.15  # interaction between i and j

            # velocity in transformed space
            #vxp = 0.2
            #vyp = 0.3

            vxp = self.scene.xid.vxp
            vyp = self.scene.xid.vyp

            tauix = 0
            tauiy = 0
            for robot in self.neighbors:
                pijx = self.xi.xp - robot.xi.xp
                pijy = self.xi.yp - robot.xi.yp
                pij0 = self.xi.distancepTo(robot.xi)
                pijd0 = self.xid.distancepTo(robot.xid)
                tauij0 = 2 * (pij0**4 - pijd0**4) / pij0**3
                tauix += tauij0 * pijx / pij0
                tauiy += tauij0 * pijy / pij0
            #tauix, tauiy = saturate(tauix, tauiy, dxypMax)
            vxp += -K3 * tauix
            vyp += -K3 * tauiy

            self.v1Desired = vxp
            self.v2Desired = vyp
            return vxp, vyp

        elif self.dynamics >= 15 and self.dynamics <= 18:
            # For e-puk dynamics
            # Feedback linearization
            # v1: left wheel speed
            # v2: right wheel speed
            K3 = 0.0  # interaction between i and j

            dxypMax = float('inf')
            if self.role == self.scene.ROLE_LEADER:  # I am a leader
                K1 = 1
                K2 = 1
            elif self.role == self.scene.ROLE_FOLLOWER:
                K1 = 0  # Reference position information is forbidden
                K2 = 1
            elif self.role == self.scene.ROLE_PEER:
                K1 = 1
                K2 = 0
                K3 = 0.15  # interaction between i and j
                dxypMax = 0.7

            # sort neighbors by distance
            if True:  #not hasattr(self, 'dictDistance'):
                self.dictDistance = dict()
                for j in range(len(self.scene.robots)):
                    if self.scene.adjMatrix[self.index, j] == 0:
                        continue
                    robot = self.scene.robots[j]  # neighbor
                    self.dictDistance[j] = self.xi.distancepTo(robot.xi)
                self.listSortedDistance = sorted(self.dictDistance.items(),
                                                 key=operator.itemgetter(1))

            # velocity in transformed space
            vxp = 0
            vyp = 0

            tauix = 0
            tauiy = 0
            # neighbors sorted by distance in ascending order (nearest first)
            lsd = self.listSortedDistance
            jList = [lsd[0][0], lsd[1][0]]
            m = 2
            while m < len(lsd) and lsd[m][1] < 1.414 * lsd[1][1]:
                jList.append(lsd[m][0])
                m += 1
            #print(self.listSortedDistance)
            for j in jList:
                robot = self.scene.robots[j]
                pijx = self.xi.xp - robot.xi.xp
                pijy = self.xi.yp - robot.xi.yp
                pij0 = self.xi.distancepTo(robot.xi)
                if self.dynamics == 18:
                    pijd0 = self.scene.alpha
                else:
                    pijd0 = self.xid.distancepTo(robot.xid)
                tauij0 = 2 * (pij0**4 - pijd0**4) / pij0**3
                tauix += tauij0 * pijx / pij0
                tauiy += tauij0 * pijy / pij0

            # Achieve and keep formation
            #tauix, tauiy = saturate(tauix, tauiy, dxypMax)
            vxp += -K3 * tauix
            vyp += -K3 * tauiy

            # Velocity control toward goal
            #dCosTheta = math.cos(self.xi.theta) - math.cos(self.xid.theta)
            #print("dCosTheta: ", dCosTheta)
            #print("theta: ", self.xi.theta, "thetad: ", self.xid.theta)
            dxp = self.scene.xid.dpbarx  #+ self.l / 2 * dCosTheta
            dyp = self.scene.xid.dpbary  #+ self.l / 2 * dCosTheta
            # Velocity control toward goal
            #dxp = self.xi.xp - self.xid.xp
            #dyp = self.xi.yp - self.xid.yp
            # Limit magnitude
            dxp, dyp = saturate(dxp, dyp, dxypMax)
            vxp += -K1 * dxp
            vyp += -K1 * dyp

            # Take goal's speed into account
            vxp += K2 * self.xid.vxp
            vyp += K2 * self.xid.vyp

            kk = 1
            theta = self.xi.theta
            M11 = kk * math.sin(theta) + math.cos(theta)
            M12 = -kk * math.cos(theta) + math.sin(theta)
            M21 = -kk * math.sin(theta) + math.cos(theta)
            M22 = kk * math.cos(theta) + math.sin(theta)

            v1 = M11 * vxp + M12 * vyp
            v2 = M21 * vxp + M22 * vyp

            #v1 = 0.3
            #v2 = 0.3

        elif self.dynamics == 20:
            # step signal
            if self.scene.t < 1:
                v1 = 0
                v2 = 0
            else:
                v1 = self.arg2[0]
                v2 = self.arg2[1]

        elif self.dynamics == 21:
            # step signal
            if self.scene.t < 1:
                v1 = 0
                v2 = 0
            elif self.scene.t < 4:
                v1 = self.arg2[0]
                v2 = self.arg2[1]
            elif self.scene.t < 7:
                v1 = -self.arg2[0]
                v2 = -self.arg2[1]
            else:
                v1 = self.arg2[0]
                v2 = self.arg2[1]

        elif self.dynamics == 22:
            # step signal
            w = 0.3
            amp = 2
            t = self.scene.t
            t0 = 1
            if t < t0:
                v1 = 0
                v2 = 0
            else:
                v1 = amp * w * math.sin(w * (t - t0)) * self.arg2[0]
                v2 = amp * w * math.sin(w * (t - t0)) * self.arg2[1]

        else:
            raise Exception("Undefined dynanmics")

        #print("v1 = %.3f" % v1, "m/s, v2 = %.3f" % v2)

        vm = 0.7  # wheel's max linear speed in m/s
        # Find the scaling factor that keeps both wheel speeds within vm
        if math.fabs(v2) >= math.fabs(v1) and math.fabs(v2) > vm:
            alpha = vm / math.fabs(v2)
        elif math.fabs(v2) < math.fabs(v1) and math.fabs(v1) > vm:
            alpha = vm / math.fabs(v1)
        else:
            alpha = 1
        v1 = alpha * v1
        v2 = alpha * v2

        # Limit maximum acceleration (deprecated)

        if LIMIT_MAX_ACC:

            dvMax = accMax * self.scene.dt

            # limit v1
            dv1 = v1 - self.v1Desired
            if dv1 > dvMax:
                self.v1Desired += dvMax
            elif dv1 < -dvMax:
                self.v1Desired -= dvMax
            else:
                self.v1Desired = v1
            v1 = self.v1Desired

            # limit v2
            dv2 = v2 - self.v2Desired
            if dv2 > dvMax:
                self.v2Desired += dvMax
            elif dv2 < -dvMax:
                self.v2Desired -= dvMax
            else:
                self.v2Desired = v2
            v2 = self.v2Desired
        elif not LIMIT_MAX_ACC:
            self.v1Desired = v1
            self.v2Desired = v2

        # Record data
        if (self.scene.vrepConnected and self.scene.SENSOR_TYPE == "VPL16"
                and self.VPL16_counter == 3 and self.recordData == True):
            self.data.add()

        # print('v = ', pow(pow(v1, 2) + pow(v2, 2), 0.5))

        if self.scene.vrepConnected:
            omega1 = v1 * 10.25
            omega2 = v2 * 10.25
            # return angular speeds of the two wheels
            return omega1, omega2
        else:
            # return linear speeds of the two wheels
            return v1, v2

    def draw(self, image, drawType):
        if drawType == 1:
            xi = self.xi
            #color = (0, 0, 255)
            color = self.scene.getRobotColor(self.index, 255, True)
        elif drawType == 2:
            xi = self.xid
            color = (0, 255, 0)
        r = self.l / 2
        rPix = round(r * self.scene.m2pix())
        dx = -r * math.sin(xi.theta)
        dy = r * math.cos(xi.theta)
        p1 = np.float32([[xi.x + dx, xi.y + dy]])
        p2 = np.float32([[xi.x - dx, xi.y - dy]])
        p0 = np.float32([[xi.x, xi.y]])
        p3 = np.float32([[xi.x + dy / 2, xi.y - dx / 2]])
        p1Pix = self.scene.m2pix(p1)
        p2Pix = self.scene.m2pix(p2)
        p0Pix = self.scene.m2pix(p0)
        p3Pix = self.scene.m2pix(p3)
        if USE_CV2 == True:
            if self.dynamics <= 1 or self.dynamics == 4 or self.dynamics == 5:
                cv2.circle(image, tuple(p0Pix[0]), rPix, color)
            else:
                cv2.line(image, tuple(p1Pix[0]), tuple(p2Pix[0]), color)
                cv2.line(image, tuple(p0Pix[0]), tuple(p3Pix[0]), color)

    def setPosition(self, stateVector=None):
        # stateVector = [x, y, theta]

        z0 = 0.1587
        if stateVector is None:
            x0 = self.xi.x
            y0 = self.xi.y
            theta0 = self.xi.theta
        elif len(stateVector) == 3:
            x0 = stateVector[0]
            y0 = stateVector[1]
            theta0 = stateVector[2]
            self.xi.x = x0
            self.xi.y = y0
            self.xi.theta = theta0
        else:
            raise Exception('Argument error!')
        if self.scene.vrepConnected == False:
            return
        vrep.simxSetObjectPosition(self.scene.clientID, self.robotHandle, -1,
                                   [x0, y0, z0], vrep.simx_opmode_oneshot)
        vrep.simxSetObjectOrientation(self.scene.clientID, self.robotHandle,
                                      -1, [0, 0, theta0],
                                      vrep.simx_opmode_oneshot)
        message = "Robot #" + str(self.index) + "'s pose is set to "
        message += "[{0:.3f}, {1:.3f}, {2:.3f}]".format(x0, y0, theta0)
        self.scene.log(message)

    def readSensorData(self):
        if self.scene.vrepConnected == False:
            return
        if "readSensorData_firstCall" not in self.__dict__:
            self.readSensorData_firstCall = True
        else:
            self.readSensorData_firstCall = False

        # Read robot states
        res, pos = vrep.simxGetObjectPosition(self.scene.clientID,
                                              self.robotHandle, -1,
                                              vrep.simx_opmode_blocking)
        if res != 0:
            raise VrepError("Cannot get object position with error code " +
                            str(res))
        res, ori = vrep.simxGetObjectOrientation(self.scene.clientID,
                                                 self.robotHandle, -1,
                                                 vrep.simx_opmode_blocking)
        if res != 0:
            raise VrepError("Cannot get object orientation with error code " +
                            str(res))
        res, vel, omega = vrep.simxGetObjectVelocity(self.scene.clientID,
                                                     self.robotHandle,
                                                     vrep.simx_opmode_blocking)
        if res != 0:
            raise VrepError("Cannot get object velocity with error code " +
                            str(res))
        #print("Linear speed: %.3f" % (vel[0]**2 + vel[1]**2)**0.5,
        #      "m/s. Angular speed: %.3f" % omega[2])
        #print("pos: %.2f" % pos[0], ", %.2f" % pos[1])
        #print("Robot #", self.index, " ori: %.3f" % ori[0], ", %.3f" % ori[1], ", %.3f" % ori[2])

        self.xi.x = pos[0]
        self.xi.y = pos[1]
        self.xi.alpha = ori[0]
        self.xi.beta = ori[1]
        self.xi.theta = ori[2]
        sgn = np.sign(
            np.dot(
                np.asarray(vel[0:2]),
                np.asarray([math.cos(self.xi.theta),
                            math.sin(self.xi.theta)])))
        self.vActual = sgn * (vel[0]**2 + vel[1]**2)**0.5
        self.omegaActual = omega[2]
        # Read laser/vision sensor data
        if self.scene.SENSOR_TYPE == "2d_":
            # self.laserFrontHandle
            # self.laserRearHandle

            if self.readSensorData_firstCall:
                opmode = vrep.simx_opmode_streaming
            else:
                opmode = vrep.simx_opmode_buffer
            laserFront_points = vrep.simxGetStringSignal(
                self.scene.clientID, self.laserFrontName + '_signal', opmode)
            print(self.laserFrontName + '_signal: ', len(laserFront_points[1]))
            laserRear_points = vrep.simxGetStringSignal(
                self.scene.clientID, self.laserRearName + '_signal', opmode)
            print(self.laserRearName + '_signal: ', len(laserRear_points[1]))
        elif self.scene.SENSOR_TYPE == "2d":  # deprecated
            raise Exception('2d sensor is not supported!!!!')
        elif self.scene.SENSOR_TYPE == "VPL16":
            # self.pointCloudHandle
            velodyne_points = vrep.simxCallScriptFunction(
                self.scene.clientID, self.pointCloudName, 1,
                'getVelodyneData_function', [], [], [], 'abc',
                vrep.simx_opmode_blocking)
            #print(len(velodyne_points[2]))
            #print(velodyne_points[2])
            res = velodyne_points[0]

            # Parse data
            if 'VPL16_counter' not in self.__dict__:
                self.VPL16_counter = 0
            # reset the counter every fourth time
            if self.VPL16_counter == 4:
                self.VPL16_counter = 0
            if self.VPL16_counter == 0:
                # Reset point cloud
                self.pointCloud.clearData()
            #print('VPL16_counter = ', self.VPL16_counter)
            self.pointCloud.addRawData(velodyne_points[2])  # will rotate here

            if self.VPL16_counter == 3:

                #print("Length of point cloud is " + str(len(self.pointCloud.data)))
                if res != 0:
                    raise VrepError("Cannot get point cloud with error code " +
                                    str(res))

                #start = time.clock()
                self.pointCloud.crop()
                #end = time.clock()
                #self.pointCloud.updateScanVector() # option 2
                self.pointCloud.updateOccupancyMap()  # option 1
                #print('Time elapsed: ', end - start)
            self.VPL16_counter += 1

        elif self.scene.SENSOR_TYPE == "kinect":
            pass
        else:
            return

    def getV1V2(self):
        v1 = self.vActual + self.omegaActual * self.l / 2
        v2 = self.vActual - self.omegaActual * self.l / 2
        return np.array([[v1, v2]])
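Two small pieces of `control` above are worth isolating for readability: the feedback-linearization map that turns the commanded velocity in the transformed space (vxp, vyp) into left/right wheel speeds for dynamics 15-18, and the uniform scaling that keeps the faster wheel below the 0.7 m/s limit. The helpers below restate those steps with the same constants and variable names; they are a reading aid, not additions to the original class.

import math


def wheel_speeds_from_transformed_velocity(vxp, vyp, theta, kk=1.0):
    """Feedback-linearization map used for dynamics 15-18: (vxp, vyp) -> (v1, v2)."""
    M11 = kk * math.sin(theta) + math.cos(theta)
    M12 = -kk * math.cos(theta) + math.sin(theta)
    M21 = -kk * math.sin(theta) + math.cos(theta)
    M22 = kk * math.cos(theta) + math.sin(theta)
    return M11 * vxp + M12 * vyp, M21 * vxp + M22 * vyp


def saturate_wheel_speeds(v1, v2, vm=0.7):
    """Scale both wheels by one common factor so the faster wheel stays within vm."""
    fastest = max(abs(v1), abs(v2))
    alpha = vm / fastest if fastest > vm else 1.0
    return alpha * v1, alpha * v2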
Example #14
def simpleicp(X_fix,
              X_mov,
              correspondences=1000,
              neighbors=10,
              min_planarity=0.3,
              min_change=1,
              max_iterations=100):

    start_time = time.time()
    log("Create point cloud objects ...")
    pcfix = PointCloud(X_fix[:, 0], X_fix[:, 1], X_fix[:, 2])
    pcmov = PointCloud(X_mov[:, 0], X_mov[:, 1], X_mov[:, 2])

    log("Select points for correspondences in fixed point cloud ...")
    pcfix.select_n_points(correspondences)
    sel_orig = pcfix.sel

    log("Estimate normals of selected points ...")
    pcfix.estimate_normals(neighbors)

    H = np.eye(4)
    residual_distances = []

    log("Start iterations ...")
    for i in range(0, max_iterations):

        initial_distances = matching(pcfix, pcmov)

        # TODO: change initial_distances in place instead of returning it
        initial_distances = reject(pcfix, pcmov, min_planarity,
                                   initial_distances)

        dH, residuals = estimate_rigid_body_transformation(
            pcfix.x[pcfix.sel], pcfix.y[pcfix.sel], pcfix.z[pcfix.sel],
            pcfix.nx[pcfix.sel], pcfix.ny[pcfix.sel], pcfix.nz[pcfix.sel],
            pcmov.x[pcmov.sel], pcmov.y[pcmov.sel], pcmov.z[pcmov.sel])

        residual_distances.append(residuals)

        pcmov.transform(dH)

        H = dH @ H
        pcfix.sel = sel_orig

        if i > 0:
            if check_convergence_criteria(residual_distances[i],
                                          residual_distances[i - 1],
                                          min_change):
                log("Convergence criteria fulfilled -> stop iteration!")
                break

        if i == 0:
            log("{:9s} | {:15s} | {:15s} | {:15s}".format(
                "Iteration", "correspondences", "mean(residuals)",
                "std(residuals)"))
            log("{:9d} | {:15d} | {:15.4f} | {:15.4f}".format(
                0, len(initial_distances), np.mean(initial_distances),
                np.std(initial_distances)))
        log("{:9d} | {:15d} | {:15.4f} | {:15.4f}".format(
            i + 1, len(residual_distances[i]), np.mean(residual_distances[i]),
            np.std(residual_distances[i])))

    log("Estimated transformation matrix H:")
    log("H = [{:12.6f} {:12.6f} {:12.6f} {:12.6f}]".format(
        H[0, 0], H[0, 1], H[0, 2], H[0, 3]))
    log("    [{:12.6f} {:12.6f} {:12.6f} {:12.6f}]".format(
        H[1, 0], H[1, 1], H[1, 2], H[1, 3]))
    log("    [{:12.6f} {:12.6f} {:12.6f} {:12.6f}]".format(
        H[2, 0], H[2, 1], H[2, 2], H[2, 3]))
    log("    [{:12.6f} {:12.6f} {:12.6f} {:12.6f}]".format(
        H[3, 0], H[3, 1], H[3, 2], H[3, 3]))
    log("Finished in {:.3f} seconds!".format(time.time() - start_time))

    return H
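`simpleicp` takes the fixed and moving clouds as plain coordinate arrays and returns the homogeneous 4x4 transform H that registers the moving cloud onto the fixed one. A small self-check one might run against it, assuming X_fix and X_mov are (N, 3) NumPy arrays as the column indexing suggests; the surface, rotation, and translation below are arbitrary test data.

import numpy as np

# Synthetic "scan": a smooth height field sampled on a regular grid
gx, gy = np.meshgrid(np.linspace(-1, 1, 50), np.linspace(-1, 1, 50))
gz = 0.3 * np.sin(2 * gx) * np.cos(2 * gy)
X_fix = np.column_stack([gx.ravel(), gy.ravel(), gz.ravel()])

# Moving cloud: the same surface under a small, known rigid motion
angle = np.deg2rad(3.0)
R = np.array([[np.cos(angle), -np.sin(angle), 0.0],
              [np.sin(angle),  np.cos(angle), 0.0],
              [0.0,            0.0,           1.0]])
t = np.array([0.05, -0.03, 0.02])
X_mov = X_fix @ R.T + t

# The recovered H should approximately undo the motion applied above
H = simpleicp(X_fix, X_mov, correspondences=500, neighbors=10)
print(H)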
Example #15
    vc_backup_folder.mkdir()

for ply_file in relabeled_clouds_folder.glob("*.ply"):
    backup_file = vc_backup_folder / ply_file.name.replace("ply", "pkl")
    if overwrite or not backup_file.exists():
        print(f"Processing: {ply_file}")

        # Retrieve data
        data = read_ply(str(ply_file))
        cloud = np.vstack((data['x'], data['y'], data['z'])).T
        rgb_colors = np.vstack((data['red'], data['green'], data['blue'])).T
        dlaser = data['reflectance']
        label = data['label'] if "label" in data.dtype.names else None

        # Define clouds
        pc = PointCloud(cloud, dlaser, rgb_colors, label)
        vc = VoxelCloud(pc,
                        max_voxel_size=max_voxel_size,
                        threshold_grow=threshold_grow,
                        min_voxel_length=min_voxel_length,
                        method=method)

        # Save voxel cloud
        with open(backup_file, 'wb') as handle:
            pickle.dump(vc, handle, protocol=pickle.HIGHEST_PROTOCOL)
        print(f"Done processing: {ply_file}\n")

# %% Extract and backup component clouds

# Parameters
vc_backup_folder = Path("..") / "data" / "backup" / "voxel_cloud"
Example #16
def plot_pcl_from_file(file_path, verbose=False):
    pcl = PointCloud.from_file(file_path)
    plot_pcl(pcl, window_title=file_path)
Example #17
    def load_pcls(self, dataset_folder):
        self.pcls = []
        max_size = 0.22
        for i in range(len(self.pcl_names)):
            filepath = dataset_folder + 'pcls/' + self.pcl_names[i] + '.ply'
            self.pcls.append(PointCloud.from_file(filepath))
Example #18
def init_tracker():
    json_request = request.get_json()
    pointcloud = PointCloud.parse_json(json_request["pointcloud"])
    tracker = Tracker(pointcloud)
    print(str(tracker))
    return "success"
Example #19
from pointcloud import PointCloud
from pyvox import voxelize

filepath = "/home/paulo/mustard.ply"
voxel_grid_side = 30
max_size = 0.22
pcl = PointCloud.from_file(filepath)
vertices = pcl.vertices

voxel_grid = voxelize(vertices,
                      max_size=max_size,
                      voxel_grid_side=voxel_grid_side,
                      plot=True)