Example #1
 def getLineSensors(self):
     leftReading = vrep.simxReadVisionSensor(self.clientID,
                                             self.sensor_left,
                                             vrep.simx_opmode_blocking)[1]
     rightReading = vrep.simxReadVisionSensor(self.clientID,
                                              self.sensor_right,
                                              vrep.simx_opmode_blocking)[1]
     return (leftReading, rightReading)
Example #2
    def readVisionSensor(self, object):
        operation = self.__controlCommandsOnServer('vision_sensor', object)
        if operation is vrep.simx_opmode_streaming:
            vrep.simxReadVisionSensor(self.clientID, object, operation)

        _, sensor_reading, sensor_data = vrep.simxReadVisionSensor(
            self.clientID, object, vrep.simx_opmode_streaming)

        return sensor_reading, sensor_data
Example #3
    def run(self):
        self.away = False
        while self.initial_position == [0.0, 0.0, 0.0]:
            return_code, self.initial_position = vrep.simxGetObjectPosition(
                self.clientID, self.handle, -1, vrep.simx_opmode_buffer)
            t = time.time()
        return_code, self.initial_position = vrep.simxGetObjectPosition(
            self.clientID, self.handle, -1, vrep.simx_opmode_buffer)
        t = time.time()
        while True:
            return_code, left_state, left_packets = vrep.simxReadVisionSensor(
                self.clientID, self.left_sensor_handle,
                vrep.simx_opmode_buffer)
            return_code, middle_state, middle_packets = vrep.simxReadVisionSensor(
                self.clientID, self.middle_sensor_handle,
                vrep.simx_opmode_buffer)
            return_code, right_state, right_packets = vrep.simxReadVisionSensor(
                self.clientID, self.right_sensor_handle,
                vrep.simx_opmode_buffer)

            linearVelocityLeft = self.nominalLinearVelocity * self.s
            linearVelocityRight = self.nominalLinearVelocity * self.s

            if left_state:
                linearVelocityRight = linearVelocityRight * self.adjust
            if right_state:
                linearVelocityLeft = linearVelocityLeft * self.adjust

            if left_state and right_state and self.away:
                self.fitness = math.inf
                break

            vrep.simxSetJointTargetVelocity(
                self.clientID, self.left_joint_handle,
                linearVelocityLeft / (self.s * self.wheelRadius),
                vrep.simx_opmode_streaming)
            vrep.simxSetJointTargetVelocity(
                self.clientID, self.right_joint_handle,
                linearVelocityRight / (self.s * self.wheelRadius),
                vrep.simx_opmode_streaming)

            return_code, self.position = vrep.simxGetObjectPosition(
                self.clientID, self.handle, -1, vrep.simx_opmode_buffer)
            euclidean_dist = (
                ((self.position[0] - self.initial_position[0])**2) +
                ((self.position[1] - self.initial_position[1])**2))**(1 / 2)
            if euclidean_dist > 0.05:
                self.away = True
            if euclidean_dist < 0.05 and self.away:
                self.away = False
                self.fitness = time.time() - t
                t = time.time()
                break
            time.sleep(0.01)
        return self.fitness
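
The distance check above compares the 2-D Euclidean distance against a 0.05 threshold. A minimal equivalent sketch of the same computation using math.hypot (the helper name is illustrative, not part of the class above):

import math

def distance_from_start(position, initial_position):
    # Same 2-D Euclidean distance computed in run(), written with math.hypot
    return math.hypot(position[0] - initial_position[0],
                      position[1] - initial_position[1])
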
Example #4
def getDepthImage(clientID):
    imageCount = 0  # to keep track of how many images have been processed
    processLimit = 5

    #    print ('Vision Sensor object handling')
    errorCode, rgbCamHandle = vrep.simxGetObjectHandle(
        clientID, 'kinect_rgb', vrep.simx_opmode_oneshot_wait)
    errorCode, xyzCamHandle = vrep.simxGetObjectHandle(
        clientID, 'kinect_pointcloud', vrep.simx_opmode_oneshot_wait)

    #    print ('Setting up First Calls for Reading Images and Sensor Values')
    # Note that the first image takes a while to arrive, so it cannot be read
    # immediately; the initial streaming call only sets up the buffer
    errorRGB, resolution, rgbImage = vrep.simxGetVisionSensorImage(
        clientID, rgbCamHandle, 0, vrep.simx_opmode_streaming)

    # The API return value for the camera's coordinate extraction filter is:
    # (1) point count along x, (2) point count along y, (3) point x1, (4) point y1,
    # (5) point z1, (6) distance of point 1, (7) point x2, etc.
    # auxPackets[1] holds the data in the format described above
    errorXYZ, detectionState, auxPackets = vrep.simxReadVisionSensor(
        clientID, xyzCamHandle, vrep.simx_opmode_streaming)

    # loop while connected to simulation
    while (vrep.simxGetConnectionId(clientID) != -1):

        # Read in camera information (Note operating mode changed to buffer)
        errorRGB, resolution, rgbImage = vrep.simxGetVisionSensorImage(
            clientID, rgbCamHandle, 0, vrep.simx_opmode_buffer)
        errorXYZ, detectionState, auxPackets = vrep.simxReadVisionSensor(
            clientID, xyzCamHandle, vrep.simx_opmode_buffer)

        errorCodes = [errorRGB, errorXYZ]

        # Check if have both image and xyz coordinate data available
        if (all(errors == vrep.simx_return_ok for errors in errorCodes)):
            # print ("image OK!!!")
            imageCount += 1

        elif (all(errors == vrep.simx_return_novalue_flag
                  for errors in errorCodes)):
            # print ("no image yet")
            pass
        else:
            # print (errorCodes)
            pass

        # exit once more than a set number of images has been processed
        if imageCount > processLimit:
            break

    return auxPackets, resolution, rgbImage
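
The comment above spells out the layout of the coordinate-extraction filter packet. A minimal sketch for unpacking auxPackets[1] into per-point (x, y, z, distance) tuples, assuming that layout; the helper name is illustrative and not part of the original code:

def unpack_xyz_packet(aux_packet):
    # aux_packet is assumed to be auxPackets[1]: entries 0 and 1 are the point
    # counts along x and y, followed by repeating groups of (x, y, z, distance).
    points = []
    for i in range(2, len(aux_packet) - 3, 4):
        x, y, z, dist = aux_packet[i:i + 4]
        points.append((x, y, z, dist))
    return points
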
Example #5
def getSensorData(clientID, sensorHandles):
    res, aux, auxD = vrep.simxReadVisionSensor(clientID, sensorHandles[0],
                                               vrep.simx_opmode_buffer)
    result1 = transformInMatrix(auxD)
    result1 = convertTransformedDataSensor1(clientID, sensorHandles[0],
                                            result1)

    res, aux, auxD = vrep.simxReadVisionSensor(clientID, sensorHandles[1],
                                               vrep.simx_opmode_buffer)
    result2 = transformInMatrix(auxD)
    result2 = convertTransformedDataSensor1(clientID, sensorHandles[1],
                                            result2)

    return result1 + result2
Example #6
 def get_cloud(self):
     panda_id = self.cid
     # Get handle to camera
     sim_ret, cam_handle = vrep.simxGetObjectHandle(
         panda_id, 'kinect_depth', vrep.simx_opmode_blocking)
     # Get camera pose and intrinsics in simulation
     emptyBuff = self.dummybyte
     res, retInts, retFloats, retStrings, retBuffer = vrep.simxCallScriptFunction(
         panda_id, 'kinect', vrep.sim_scripttype_childscript, 'absposition',
         [], [], [], emptyBuff, vrep.simx_opmode_blocking)
     R = np.asarray(
         [[retFloats[0], retFloats[1], retFloats[2], retFloats[3]],
          [retFloats[4], retFloats[5], retFloats[6], retFloats[7]],
          [retFloats[8], retFloats[9], retFloats[10], retFloats[11]]])
     # print('camera pose is: ',R)
     result, state, data = vrep.simxReadVisionSensor(
         panda_id, cam_handle, vrep.simx_opmode_blocking)
     data = data[1]
     pcl = []
     for i in range(2, len(data), 4):
         p = [data[i], data[i + 1], data[i + 2], 1]
         pcl.append(np.matmul(R, p))
     pcd = remove_clipping(pcl)
     np.savetxt('data.pcd', pcd, delimiter=' ')
     insertHeader('data.pcd')
     return pcd
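
insertHeader is an external helper that is not shown in these examples. As a reference only, here is a sketch of what a minimal ASCII PCD v0.7 header writer for the x/y/z rows saved above could look like; this is an assumption about the helper's role, not its actual implementation:

def insert_pcd_header(path, num_points):
    # Hypothetical helper: prepend a standard ASCII PCD v0.7 header for x/y/z points.
    header = (
        "# .PCD v0.7 - Point Cloud Data file format\n"
        "VERSION 0.7\n"
        "FIELDS x y z\n"
        "SIZE 4 4 4\n"
        "TYPE F F F\n"
        "COUNT 1 1 1\n"
        "WIDTH {n}\n"
        "HEIGHT 1\n"
        "VIEWPOINT 0 0 0 1 0 0 0\n"
        "POINTS {n}\n"
        "DATA ascii\n"
    ).format(n=num_points)
    with open(path) as f:
        body = f.read()
    with open(path, 'w') as f:
        f.write(header + body)
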
Example #7
    def read_lidar(self, handle, mode='blocking'):
        """
        Reads lidar information given a sensor handle
        """

        info = vrep.simxReadVisionSensor(self.client_id, handle, self.modes[mode])
        return info
Example #8
def setup_sim_camera(cid):
    '''
    Fetch pcd data and object matrix from V-REP and transform to world frame
    :return: Raw pcd data
    '''
    # Get handle to camera
    sim_ret, cam_handle = vrep.simxGetObjectHandle(cid, 'kinect_depth', vrep.simx_opmode_blocking)
    # Get camera pose and intrinsics in simulation
    # a, b, g = angles[0], angles[1], angles[2]
    # Rx = np.array([[1.0, 0.0, 0.0],
    #                [0.0, np.cos(a), -np.sin(a)],
    #                [0.0, np.sin(a), np.cos(a)]], dtype=np.float32)
    # Ry = np.array([[np.cos(b), 0.0,  np.sin(b)],
    #                [0.0, 1.0, 0.0],
    #                [-np.sin(b), 0.0, np.cos(b)]], dtype=np.float32)
    # Rz = np.array([[np.cos(g), -np.sin(g), 0.0],
    #                [np.sin(g), np.cos(g), 0.0],
    #                [0.0, 0.0, 1.0]], dtype=np.float32)
    # R = np.dot(Rz, np.dot(Ry, Rx))
    emptyBuff = bytearray()
    res, retInts, retFloats, retStrings, retBuffer = vrep.simxCallScriptFunction(cid, 'kinect',
                                                                                 vrep.sim_scripttype_childscript,
                                                                                 'absposition', [], [], [], emptyBuff,
                                                                                 vrep.simx_opmode_blocking)
    R = np.asarray([[retFloats[0], retFloats[1], retFloats[2], retFloats[3]],
                    [retFloats[4], retFloats[5], retFloats[6], retFloats[7]],
                    [retFloats[8], retFloats[9], retFloats[10], retFloats[11]]])
    print('camera pose is: ',R)
    result, state, data = vrep.simxReadVisionSensor(cid, cam_handle, vrep.simx_opmode_blocking)
    data = data[1]
    pcl = []
    for i in range(2, len(data), 4):
        p = [data[i], data[i + 1], data[i + 2], 1]
        pcl.append(np.matmul(R, p))
    return pcl
Example #9
def setup_sim_camera(cid):
    '''
    Fetch pcd data and object matrix from V-REP and transform to world frame
    :return: Raw pcd data
    '''
    # Get handle to camera
    sim_ret, cam_handle = vrep.simxGetObjectHandle(cid, 'kinect_depth',
                                                   vrep.simx_opmode_blocking)
    emptyBuff = bytearray()
    res, retInts, retFloats, retStrings, retBuffer = vrep.simxCallScriptFunction(
        cid, 'kinect', vrep.sim_scripttype_childscript, 'absposition', [], [],
        [], emptyBuff, vrep.simx_opmode_blocking)
    R = np.asarray([[retFloats[0], retFloats[1], retFloats[2], retFloats[3]],
                    [retFloats[4], retFloats[5], retFloats[6], retFloats[7]],
                    [retFloats[8], retFloats[9], retFloats[10],
                     retFloats[11]]])
    print('camera pose is: ', R)
    result, state, data = vrep.simxReadVisionSensor(cid, cam_handle,
                                                    vrep.simx_opmode_blocking)
    data = data[1]
    pcl = []
    for i in range(2, len(data), 4):
        p = [data[i], data[i + 1], data[i + 2], 1]
        pcl.append(np.matmul(R, p))
    return pcl
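
The per-point loop above can also be vectorized with NumPy. A sketch of an equivalent transform, assuming data holds repeating (x, y, z, distance) groups starting at index 2, as the loop does, and that R is the 3x4 pose matrix built from retFloats:

import numpy as np

def transform_points(R, data):
    # Reshape the flat packet into rows of (x, y, z, dist), drop the distance
    # column, append a homogeneous 1, and apply the 3x4 pose matrix R.
    pts = np.asarray(data[2:]).reshape(-1, 4)[:, :3]
    homogeneous = np.hstack([pts, np.ones((len(pts), 1))])
    return (R @ homogeneous.T).T
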
Example #10
    def initSensors(self):
        error_code, self.left_sensor_handle = vrep.simxGetObjectHandle(
            self.clientID, 'LeftSensor', vrep.simx_opmode_oneshot_wait)
        return_code, left_state, left_packets = vrep.simxReadVisionSensor(
            self.clientID, self.left_sensor_handle, vrep.simx_opmode_streaming)

        error_code, self.middle_sensor_handle = vrep.simxGetObjectHandle(
            self.clientID, 'MiddleSensor', vrep.simx_opmode_oneshot_wait)
        return_code, middle_state, middle_packets = vrep.simxReadVisionSensor(
            self.clientID, self.middle_sensor_handle,
            vrep.simx_opmode_streaming)

        error_code, self.right_sensor_handle = vrep.simxGetObjectHandle(
            self.clientID, 'RightSensor', vrep.simx_opmode_oneshot_wait)
        return_code, right_state, right_packets = vrep.simxReadVisionSensor(
            self.clientID, self.right_sensor_handle,
            vrep.simx_opmode_streaming)
Example #11
 def get_vis_sensors(self):
     """Returns a list of lists containing the average red/green/blue value
     from the left/middle/right vision sensor."""
     aves = []
     for loc in self.vs_locs:
         h = self.vss[loc]
         _,_,pkt = vrep.simxReadVisionSensor(client_id, h, opmode)
         aves.append(pkt[0][11:14])
     return aves
Example #12
def initializeSensor(clientID):
    # start Hokuyo sensor
    res = vrep.simxSetIntegerSignal(clientID, 'handle_xy_sensor', 2,
                                    vrep.simx_opmode_oneshot)

    # display range sensor beam
    vrep.simxSetIntegerSignal(clientID, 'displaylasers', 1,
                              vrep.simx_opmode_oneshot)

    # Get sensor handle:
    hokuyo = getSensorHandles(clientID)

    # initialize sensor to retrieve data
    # to retrieve sensor data, at least one initial call in streaming opmode is required
    res, aux, auxD = vrep.simxReadVisionSensor(clientID, hokuyo[0],
                                               vrep.simx_opmode_streaming)
    res, aux, auxD = vrep.simxReadVisionSensor(clientID, hokuyo[1],
                                               vrep.simx_opmode_streaming)

    return hokuyo
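
A usage sketch pairing this initializer with the getSensorData helper from an earlier example; the short sleep that lets the first streamed packets reach the client is an assumption about timing, not part of the original code:

import time

hokuyo = initializeSensor(clientID)   # the two Hokuyo vision-sensor handles
time.sleep(0.1)                       # let the first streamed values arrive
readings = getSensorData(clientID, hokuyo)
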
Example #13
 def get_light_sensors(self):
     """ Light sensors data getter
     
     Returns:
         [list] -- [list of light sensors data]
     """
     res_list = []
     for i in list(filter(lambda x: 'sensor_color' in x[0] ,self.components.items())):
         res, _, prox = vrep.simxReadVisionSensor(self.client_id, i[1]['id'], vrep.simx_opmode_blocking)
         if res != vrep.simx_return_ok:
             err_print(prefix="GET LIGHT SENSORS: ", message=err_list)
         res_list.append(prox[0][11:14])
     return res_list
Example #14
    def sense(self):
        """
        Sense the world and return data
        :return: constructed dictionary of the form {DEPTH, {BLOB_COLOR, BLOB_POSITION}, BLOB_SIZE}
        """

        out = {}

        # retrieve depth data
        result, resolution, data = vrep.simxGetVisionSensorDepthBuffer(self._clientID,
                                                                       self.sensors_handles['kinect_depth'],
                                                                       self._operation_mode)
        if result != vrep.simx_return_ok:  # checking the reading result.
            exit(result)

        # get clean depth data
        out['depth'] = self.get_depth(data)  # appending the distance depth.

        # retrieve vision sensor image
        result_vision, resolution, image = vrep.simxGetVisionSensorImage(self._clientID,
                                                                         self.sensors_handles['kinect_rgb'],
                                                                         0,
                                                                         vrep.simx_opmode_blocking)
        # retrieve vision sensor filtered image (blob)
        result_blob, t0, t1 = vrep.simxReadVisionSensor(self._clientID,
                                                        self.sensors_handles['kinect_rgb'],
                                                        vrep.simx_opmode_blocking)

        # extract blob data
        out['vision'] = self.get_vision(resolution, image, t1)

        # get load status
        out['load'] = self._load

        self._term.write("sensed: {}".format(out))

        return out
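
A usage sketch of the dictionary returned by sense(); the robot instance name is illustrative and assumes the class above has already been constructed:

reading = robot.sense()
print(reading['depth'])   # cleaned depth data
print(reading['vision'])  # blob colour/position/size extracted from the RGB image
print(reading['load'])    # current load status
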
Example #15
 def lerSensor(self, corpo):
     x = vrep.simxReadVisionSensor(self.cliente, corpo,
                                   vrep.simx_opmode_streaming)
     return x[1]
Example #16
# Retrieve sensor handle
errorCodeS,sensor_handle=vrep.simxGetObjectHandle(clientID, \
                                 'sensingNose',vrep.simx_opmode_oneshot_wait)
# Request Proximity sensor values in Streaming mode
errorCodeR,detectionState,detectedPoint,detectedObjectHandle,detectedSurfaceNormalVector= \
                    vrep.simxReadProximitySensor(clientID,sensor_handle,vrep.simx_opmode_streaming)

# IR sensors
errorCodeLS,left_sensor_handle=vrep.simxGetObjectHandle(clientID, \
                                'leftSensor',vrep.simx_opmode_oneshot_wait)
errorCodeMS,middle_sensor_handle=vrep.simxGetObjectHandle(clientID, \
                                'middleSensor',vrep.simx_opmode_oneshot_wait)
errorCodeRS,right_sensor_handle=vrep.simxGetObjectHandle(clientID, \
                                'rightSensor',vrep.simx_opmode_oneshot_wait)
# request IR sensor values in Streaming mode
errorCode_m, flag_ms, middle_sensor_data = vrep.simxReadVisionSensor(
    clientID, middle_sensor_handle, vrep.simx_opmode_streaming)
errorCode_l, flag_ls, left_sensor_data = vrep.simxReadVisionSensor(
    clientID, left_sensor_handle, vrep.simx_opmode_streaming)
errorCode_r, flag_rs, right_sensor_data = vrep.simxReadVisionSensor(
    clientID, right_sensor_handle, vrep.simx_opmode_streaming)

#Set initial simulation properties
#wheelVelocityLeft = wheelVelocityRight    # initial wheel velocities
wheelVelocityLeft = wheelVelocityRight = 1
simulationStartTime = time.time()  # simulation start time
simulationTime = 30  # simulation time
backUntilTime = -1  # line following correction time


# motion functions
def moveBackward(speed):
Example #17
                                                  vrep.simx_opmode_blocking)
returnCode, robot_handle = vrep.simxGetObjectHandle(clientID, 'LineTracer',
                                                    vrep.simx_opmode_blocking)
returnCode, leftsensor = vrep.simxGetObjectHandle(clientID, 'LeftSensor',
                                                  vrep.simx_opmode_blocking)
returnCode, midsensor = vrep.simxGetObjectHandle(clientID, 'MiddleSensor',
                                                 vrep.simx_opmode_blocking)
returnCode, body = vrep.simxGetObjectHandle(clientID, 'dr12_body_',
                                            vrep.simx_opmode_blocking)
returnCode, floor = vrep.simxGetObjectHandle(clientID, 'ResizableFloor_5_25',
                                             vrep.simx_opmode_blocking)

left_sensor1 = 0
mid_sensor1 = 0
right_sensor1 = 0
left_sensor1 = vrep.simxReadVisionSensor(clientID, leftsensor,
                                         vrep.simx_opmode_streaming)
mid_sensor1 = vrep.simxReadVisionSensor(clientID, midsensor,
                                        vrep.simx_opmode_streaming)
# right_sensor1 = vrep.simxReadVisionSensor(clientID, rightsensor, vrep.simx_opmode_streaming)
time.sleep(5)
odometer_val = vrep.simxGetObjectPosition(clientID, leftmotor, floor,
                                          vrep.simx_opmode_continuous)

# returnCode=vrep.simxSetJointTargetVelocity(clientID,leftmotor,31.416/4,vrep.simx_opmode_streaming)
# returnCode=vrep.simxSetJointTargetVelocity(clientID,rightmotor,31.416/4,vrep.simx_opmode_streaming)

odometer_val = vrep.simxGetObjectPosition(clientID, leftmotor, floor,
                                          vrep.simx_opmode_continuous)
count = 0
prev_sec = time.time()
prev_yaw = yaw_angle()
Example #18
errorCode,usensorV1=vrep.simxGetObjectHandle(clientID,'Vision_sensor1',vrep.simx_opmode_oneshot_wait)
errorCode,usensorV2=vrep.simxGetObjectHandle(clientID,'Vision_sensor2',vrep.simx_opmode_oneshot_wait)

#for i in range(1,100):
#
#	vrep.simxSetJointTargetVelocity(clientID,left_motor,2,vrep.simx_opmode_oneshot_wait)
#	vrep.simxSetJointTargetVelocity(clientID,right_motor,2,vrep.simx_opmode_oneshot_wait)
lroute = True
for i in range(1,10000):

	vrep.simxSetJointTargetVelocity(clientID,left_motor,0,vrep.simx_opmode_oneshot_wait)
	vrep.simxSetJointTargetVelocity(clientID,right_motor,0,vrep.simx_opmode_oneshot_wait)
	
	
	#sensor api
	error_code, det_state, aux = vrep.simxReadVisionSensor(clientID, usensorV,vrep.simx_opmode_oneshot_wait)
	print('Sensor : vision det state: ', det_state)
	if det_state == True:
		print('fire detected')
	else:
		error_code, det_state, aux = vrep.simxReadVisionSensor(clientID, usensorV1,vrep.simx_opmode_oneshot_wait)
		print(det_state, aux[1][1])
		if aux[1][1] > 0.3:
			lroute = not (lroute)
		print(lroute)

		if(lroute):
			print('left following')
			error_code, det_state, det_point, det_handle, det_vec = vrep.simxReadProximitySensor(clientID, usensor4,vrep.simx_opmode_oneshot_wait)
			print('Sensor : 4 det state: ', det_state, ' det point : ', det_point[2])
			if det_state == True:
Example #19
     object_pos.append(pos)
     object_angle.append(orientation)
 # Generate object pcd file from V-REP
 sim_ret, cam_handle = vrep.simxGetObjectHandle(
     clientID, 'kinect_depth', vrep.simx_opmode_blocking)
 emptyBuff = bytearray()
 res, retInts, retFloats, retStrings, retBuffer = vrep.simxCallScriptFunction(
     clientID, 'kinect', vrep.sim_scripttype_childscript,
     'absposition', [], [], [], emptyBuff,
     vrep.simx_opmode_blocking)
 R = np.asarray(
     [[retFloats[0], retFloats[1], retFloats[2], retFloats[3]],
      [retFloats[4], retFloats[5], retFloats[6], retFloats[7]],
      [retFloats[8], retFloats[9], retFloats[10], retFloats[11]]])
 # print('camera pose is: ',R)
 result, state, data = vrep.simxReadVisionSensor(
     clientID, cam_handle, vrep.simx_opmode_blocking)
 data = data[1]
 pointcloud = []
 for i in range(2, len(data), 4):
     p = [data[i], data[i + 1], data[i + 2], 1]
     pointcloud.append(np.matmul(R, p))
     # Object segmentation
 pcl = remove_clipping(pointcloud)
 # Save pcd file
 np.savetxt('data.pcd', pcl, delimiter=' ')
 insertHeader('data.pcd')
 nb_clutters = segmentation('data.pcd')
 label = [0, 0, 0, 0, 0, 0, 0, 0]
 if nb_clutters == num_obj:
     continue
 print('Number of objects: %d' % nb_clutters)
Example #20
wheelRadius = 0.027
interWheelDistance = 0.119
'''
# Print object name list
result,joint_name,intData,floatData,stringData = vrep.simxGetObjectGroupData(clientID,vrep.sim_appobj_object_type,0,vrep.simx_opmode_blocking)
print(stringData)
'''
vrep.simxStartSimulation(clientID, vrep.simx_opmode_oneshot)
while (1):
    """ Read the sensors:"""
    dist = (-1, -1)
    olddist = dist[1]
    dist = vrep.simxGetFloatSignal(clientID, "laserPointerData",
                                   vrep.simx_opmode_streaming)
    sensorReading = [False, False, False]
    sensorReading[0] = (vrep.simxReadVisionSensor(
        clientID, leftSensor, vrep.simx_opmode_oneshot)[1] == 1)
    sensorReading[1] = (vrep.simxReadVisionSensor(
        clientID, middleSensor, vrep.simx_opmode_oneshot)[1] == 1)
    sensorReading[2] = (vrep.simxReadVisionSensor(
        clientID, rightSensor, vrep.simx_opmode_oneshot)[1] == 1)
    isRed = False
    if vrep.simxReadVisionSensor(clientID, middleSensor,
                                 vrep.simx_opmode_oneshot)[2]:
        isRed = (vrep.simxReadVisionSensor(
            clientID, middleSensor, vrep.simx_opmode_oneshot)[2][0][6] > 0.9)
        if (isRed == True):
            print("STOP")
            linearVelocityRight = 0
            linearVelocityLeft = 0
            vrep.simxSetJointTargetVelocity(clientID, leftJointDynamic,
                                            linearVelocityLeft / (wheelRadius),
Example #21
    # Initializate
    r, LeftSensor_handle = vrep.simxGetObjectHandle(clientID, "LeftSensor#",
                                                    vrep.simx_opmode_blocking)
    r, MiddleSensor_handle = vrep.simxGetObjectHandle(
        clientID, "MiddleSensor#", vrep.simx_opmode_blocking)
    r, RightSensor_handle = vrep.simxGetObjectHandle(clientID, "RightSensor#",
                                                     vrep.simx_opmode_blocking)
    r, LeftJoint_handle = vrep.simxGetObjectHandle(clientID,
                                                   "DynamicLeftJoint#",
                                                   vrep.simx_opmode_blocking)
    r, RightJoint_handle = vrep.simxGetObjectHandle(clientID,
                                                    "DynamicRightJoint#",
                                                    vrep.simx_opmode_blocking)

    # Read all sensors and joints
    vrep.simxReadVisionSensor(clientID, LeftSensor_handle,
                              vrep.simx_opmode_streaming)
    vrep.simxReadVisionSensor(clientID, MiddleSensor_handle,
                              vrep.simx_opmode_streaming)
    vrep.simxReadVisionSensor(clientID, RightSensor_handle,
                              vrep.simx_opmode_streaming)
    vrep.simxGetJointPosition(clientID, LeftJoint_handle,
                              vrep.simx_opmode_streaming)
    vrep.simxGetJointPosition(clientID, RightJoint_handle,
                              vrep.simx_opmode_streaming)

    # Now retrieve streaming data (i.e. in a non-blocking fashion):
    startTime = time.time()
    while True:

        # Sense
        r, left, auxPackets = vrep.simxReadVisionSensor(
Example #22
    print("connection not successful")
    sys.exit('Could not connect')

errorCode, left_motor_handle = vrep.simxGetObjectHandle(
    clientID, 'left_joint', vrep.simx_opmode_oneshot_wait)  # initialize
errorCode, right_motor_handle = vrep.simxGetObjectHandle(
    clientID, 'right_joint', vrep.simx_opmode_oneshot_wait)

sensor_h = []  #empty list for handles

#for loop to retrieve sensor arrays and initiate sensors
for x in range(0, 5 + 1):
    errorCode, sensor_handle = vrep.simxGetObjectHandle(
        clientID, 'line_sensor' + str(x), vrep.simx_opmode_oneshot_wait)
    sensor_h.append(sensor_handle)  #keep list of handles
    errorCode, detectionState, auxPackets = vrep.simxReadVisionSensor(
        clientID, sensor_handle, vrep.simx_opmode_streaming)

Kp = 0.22
Ki = -0.0025
Kd = 0.00078
error = 0
previous_error = 0
I = 0
V_based = 2.3
SampleTime = 0.1
PID_value = 0

t = time.time()
t_cal = time.time()

while (time.time() - t) < 200:  # run for 200 s
Example #23
                                               vrep.simx_opmode_blocking)

#############################################################################################################

#Initialize robot motion by assigning wheel velocity to each motor
vrep.simxSetJointTargetVelocity(clientID, Left_Motor, speed,
                                vrep.simx_opmode_oneshot)
vrep.simxSetJointTargetVelocity(clientID, Right_Motor, speed,
                                vrep.simx_opmode_oneshot)

#Initialize sensors
errorCode, IsObstacle, DistanceVector_left, ObstacleHandle, SurfaceNormalVector = vrep.simxReadProximitySensor(
    clientID, leftsensor, vrep.simx_opmode_streaming)
errorCode, IsObstacle, DistanceVector_front, ObstacleHandle, SurfaceNormalVector = vrep.simxReadProximitySensor(
    clientID, frontsensor, vrep.simx_opmode_streaming)
errorCode, detection, auxPackets = vrep.simxReadVisionSensor(
    clientID, frontcam, vrep.simx_opmode_streaming)
errorCode, detection, auxPackets = vrep.simxReadVisionSensor(
    clientID, rightcam, vrep.simx_opmode_streaming)

returnCode, resolution, buffer = vrep.simxGetVisionSensorDepthBuffer(
    clientID, frontcam, vrep.simx_opmode_streaming)
returnCode, resolution, buffer = vrep.simxGetVisionSensorDepthBuffer(
    clientID, rightcam, vrep.simx_opmode_streaming)

##############################################################################################################

k = 0
dist_opt = 1

while clientID != -1:
    errorCode, IsObstacle_left, left, ObstacleHandle_left, SurfaceNormalVector_left = vrep.simxReadProximitySensor(
Example #24
 def read(self):
     code, state, aux_packets = v.simxReadVisionSensor(
         self._id, self._handle, self._def_op_mode)
     return state, state, aux_packets
Example #25
                                                    left_motor_handle, v + u,
                                                    vrep.simx_opmode_streaming)
        errorCode = vrep.simxSetJointTargetVelocity(clientID,
                                                    right_motor_handle, v - u,
                                                    vrep.simx_opmode_streaming)

    elif error < 0:
        errorCode = vrep.simxSetJointTargetVelocity(clientID,
                                                    left_motor_handle, v + u,
                                                    vrep.simx_opmode_streaming)
        errorCode = vrep.simxSetJointTargetVelocity(clientID,
                                                    right_motor_handle, v - u,
                                                    vrep.simx_opmode_streaming)

    # get the lidar data
    errorCode, detectionState, auxPackets1 = vrep.simxReadVisionSensor(
        clientID, vision_sensor1, vrep.simx_opmode_blocking)
    errorCode, detectionState, auxPackets2 = vrep.simxReadVisionSensor(
        clientID, vision_sensor2, vrep.simx_opmode_blocking)
    data = auxPackets1[1][1::2][0::4] + auxPackets2[1][1::2][0::4]
    data = data[1:68] + data[69:136]
    scan = list(np.array(data) * 1000)

    # compute the odometry for both wheels
    errorCode, x_left = vrep.simxGetJointPosition(clientID, left_motor_handle,
                                                  vrep.simx_opmode_streaming)
    dx_left = abs(x_left - prev_pos_left)
    prev_pos_left = x_left
    dx_left = (dx_left + math.pi) % (2 * math.pi) - math.pi

    errorCode, x_right = vrep.simxGetJointPosition(clientID,
                                                   right_motor_handle,
Example #26
def push():
    x_p = [[-0.125,0,0.05],[0.125,0,0.05]]
    x_n = [[0.125,0,0.05],[-0.125,0,0.05]]
    y_p = [[0,-0.125,0.05],[0,0.125,0.05]]
    y_n = [[0,0.125,0.05],[0,-0.125,0.05]]
    xy_p = [[-0.0884,-0.0884,0.05],[0.0884,0.0884,0.05]]
    xy_n = [[0.0884,0.0884,0.05],[-0.0884,-0.0884,0.05]]
    xp_yn = [[-0.0884,0.0884,0.05],[0.0884,-0.0884,0.05]]
    xn_yp = [[0.0884,-0.0884,0.05],[-0.0884,0.0884,0.05]]
    directions = [x_p,x_n,y_p,y_n,xy_p,xp_yn,xn_yp,xy_n]
            # add object
            object_name, object_handle = add_three_objects(clientID)
            time.sleep(5.0)
            num_obj = 3
            object_pos = []
            object_angle = []
            for obj_i in range(num_obj):
                res, pos = vrep.simxGetObjectPosition(clientID,object_handle[obj_i],-1,vrep.simx_opmode_oneshot_wait)
                res, orientation = vrep.simxGetObjectOrientation(clientID,object_handle[obj_i],-1,vrep.simx_opmode_oneshot_wait)
                object_pos.append(pos)
                object_angle.append(orientation)
            # Generate object pcd file from V-REP
            sim_ret, cam_handle = vrep.simxGetObjectHandle(clientID, 'kinect_depth', vrep.simx_opmode_blocking)
            emptyBuff = bytearray()
            res, retInts, retFloats, retStrings, retBuffer = vrep.simxCallScriptFunction(clientID, 'kinect',
                                                                                         vrep.sim_scripttype_childscript,
                                                                                         'absposition', [], [], [], emptyBuff,
                                                                                         vrep.simx_opmode_blocking)
            R = np.asarray([[retFloats[0], retFloats[1], retFloats[2], retFloats[3]],
                            [retFloats[4], retFloats[5], retFloats[6], retFloats[7]],
                            [retFloats[8], retFloats[9], retFloats[10], retFloats[11]]])
            # print('camera pose is: ',R)
            result, state, data = vrep.simxReadVisionSensor(clientID, cam_handle, vrep.simx_opmode_blocking)
            data = data[1]
            pointcloud = []
            for i in range(2, len(data), 4):
                p = [data[i], data[i + 1], data[i + 2], 1]
                pointcloud.append(np.matmul(R, p))
                # Object segmentation
            pcl = remove_clipping(pointcloud)
            # Save pcd file
            np.savetxt('data.pcd', pcl, delimiter=' ')
            insertHeader('data.pcd')
            nb_clutters = segmentation('data.pcd')
            label = [0,0,0,0,0,0,0,0]
            if nb_clutters == num_obj:
                continue
            print('Number of objects: %d' % nb_clutters)
            vg = transform(pcl)
            input_shape = vg.reshape((1,1,32,32,32))
            p = model.predict(input_shape, verbose=False)
            p = p[0]
            print('Predicted 8-dir success rate: ', p)
            best_dir = np.argmax(p)
            direction = directions[best_dir]
            print('The best direction is %d' % best_dir)
            f = open('predictions.txt', "a+")
            f.write(str(best_dir))
            f.close()
            tries = 1
            for direction in directions:
                res, target1 = vrep.simxGetObjectHandle(clientID, 'grasp', vrep.simx_opmode_oneshot_wait)
                res, target2 = vrep.simxGetObjectHandle(clientID, 'lift', vrep.simx_opmode_oneshot_wait)
                res, target3 = vrep.simxGetObjectHandle(clientID, 'lift0', vrep.simx_opmode_oneshot_wait)

                angles = [-3.14, 0, 0]
                # Set landing position
                res1 = vrep.simxSetObjectPosition(clientID, target1, -1, direction[0], vrep.simx_opmode_oneshot)
                res2 = vrep.simxSetObjectOrientation(clientID, target1, -1, angles, vrep.simx_opmode_oneshot)
                # Set pushing direction
                res3 = vrep.simxSetObjectPosition(clientID, target2, -1, direction[1], vrep.simx_opmode_oneshot)
                res4 = vrep.simxSetObjectOrientation(clientID, target2, -1, angles, vrep.simx_opmode_oneshot)
                # Set wait position
                res5 = vrep.simxSetObjectPosition(clientID, target3, -1, [direction[1][0],direction[1][1],direction[1][2]+0.15], vrep.simx_opmode_oneshot)
                res6 = vrep.simxSetObjectOrientation(clientID, target3, -1, angles, vrep.simx_opmode_oneshot)
                # Execute movements
                res, retInts, retFloats, retStrings, retBuffer = vrep.simxCallScriptFunction(clientID, 'Sphere',
                                                                                             vrep.sim_scripttype_childscript,
                                                                                             'go', [], [], [],
                                                                                             emptyBuff,
                                                                                             vrep.simx_opmode_blocking)
                print('execution signal sent')
                running = True
                while running:
                    res, signal = vrep.simxGetIntegerSignal(clientID, 'finish', vrep.simx_opmode_oneshot_wait)
                    if signal == tries:
                        running = False
                    else:
                        running = True
                print('recording data ...')
                # Recording data
                time.sleep(1.0)
                # After pushing
                result, state, new_data = vrep.simxReadVisionSensor(clientID, cam_handle, vrep.simx_opmode_blocking)
                new_data = new_data[1]
                new_pointcloud = []
                for i in range(2, len(new_data), 4):
                    p = [new_data[i], new_data[i + 1], new_data[i + 2], 1]
                    new_pointcloud.append(np.matmul(R, p))
                    # Object segmentation
                new_pcl = remove_clipping(new_pointcloud)
                np.savetxt('data_new.pcd', new_pcl, delimiter=' ')
                insertHeader('data_new.pcd')
                # v = pptk.viewer(new_pcl) # Visualize pcd if needed
                nb_clutters_new = segmentation('data_new.pcd')
                print('Number of objects: %d' % nb_clutters_new)
                if nb_clutters_new>nb_clutters:
                    dir_index = directions.index(direction)
                    print('Tried direction:', direction)
                    print('Number %d in directions list' % dir_index)
                    label[dir_index]=1
                    print('Updated label:', label)
                else:
                    print('Pushing not meaningful ...')
                for j in range(num_obj):
                    vrep.simxSetObjectPosition(clientID, object_handle[j], -1, object_pos[j], vrep.simx_opmode_oneshot_wait)
                    vrep.simxSetObjectOrientation(clientID, object_handle[j], -1, object_angle[j], vrep.simx_opmode_oneshot_wait)
                time.sleep(0.5)
                tries = tries+1
            print(label)
Example #27
def main():
    print('Program started')
    vrep.simxFinish(-1)  # just in case, close all opened connections
    clientID = vrep.simxStart('127.0.0.1', 19997, True, True, 2000, 5)
    if clientID != -1:
        print('Connected to remote API server')

        emptyBuff = bytearray()

        # Start the simulation:
        vrep.simxStartSimulation(clientID, vrep.simx_opmode_oneshot_wait)

        # initialize robot
        # Retrieve wheel joint handles:
        wheelJoints = np.empty(4, dtype=int)
        wheelJoints.fill(-1)  #front left, rear left, rear right, front right
        res, wheelJoints[0] = vrep.simxGetObjectHandle(
            clientID, 'rollingJoint_fl', vrep.simx_opmode_oneshot_wait)
        res, wheelJoints[1] = vrep.simxGetObjectHandle(
            clientID, 'rollingJoint_rl', vrep.simx_opmode_oneshot_wait)
        res, wheelJoints[2] = vrep.simxGetObjectHandle(
            clientID, 'rollingJoint_rr', vrep.simx_opmode_oneshot_wait)
        res, wheelJoints[3] = vrep.simxGetObjectHandle(
            clientID, 'rollingJoint_fr', vrep.simx_opmode_oneshot_wait)

        # set wheel velocity to 0
        for i in range(0, 4):
            vrep.simxSetJointTargetVelocity(clientID, wheelJoints[i], 0,
                                            vrep.simx_opmode_oneshot)

        # set first wheel velocities to 5 m/s for 2 s

        r = 50.0  #radius of wheels
        l1 = 300.46 / 2.0  #D/2
        l2 = 471.0 / 2.0  #C/2
        """
        vx = 0  
        vy = 0
        w0 = 1
	        
        vel1 = 1/r * (1*vx + 1 * vy -(l1+l2)*w0)  
        vel2 = 1/r * (1*vx  -1 * vy +(l1+l2)*w0) 
        vel3 = 1/r * (1*vx  -1 * vy -(l1+l2)*w0) 
        vel4 = 1/r * (1*vx + 1 * vy +(l1+l2)*w0) 
        """

        res, base = vrep.simxGetObjectHandle(clientID, 'youBot_center',
                                             vrep.simx_opmode_oneshot_wait)
        res, base_pos = vrep.simxGetObjectPosition(
            clientID, base, -1, vrep.simx_opmode_oneshot_wait)
        res, base_orient = vrep.simxGetObjectOrientation(
            clientID, base, -1, vrep.simx_opmode_oneshot_wait)
        vrep.simxGetPingTime(clientID)

        #print(base,"\n",base_pos,"\n",base_orient,"\n")

        res = vrep.simxSetIntegerSignal(clientID, 'handle_xy_sensor', 2,
                                        vrep.simx_opmode_oneshot)

        vrep.simxSetIntegerSignal(clientID, 'displaylasers', 1,
                                  vrep.simx_opmode_oneshot)

        res, hokuyo1 = vrep.simxGetObjectHandle(clientID, 'fastHokuyo_sensor1',
                                                vrep.simx_opmode_oneshot_wait)
        res, hokuyo2 = vrep.simxGetObjectHandle(clientID, 'fastHokuyo_sensor2',
                                                vrep.simx_opmode_oneshot_wait)

        res, goalBase = vrep.simxGetObjectHandle(clientID, 'Goal',
                                                 vrep.simx_opmode_oneshot_wait)
        res, goal = vrep.simxGetObjectPosition(clientID, goalBase, -1,
                                               vrep.simx_opmode_oneshot_wait)
        #print(goal)

        # turn around in start pos
        #wheelVelocities = vel(0,0,1.0,r,l1,l2)
        #drive(wheelVelocities, math.pi, clientID,wheelJoints)

        #aim for goal
        angle = aimGoal(base_pos, goal, base_orient)
        wheelVelocities = vel(0, 0, 1.0, r, l1, l2)
        drive(wheelVelocities, angle[2], clientID, wheelJoints)

        res, aux, auxD = vrep.simxReadVisionSensor(clientID, hokuyo1,
                                                   vrep.simx_opmode_streaming)
        res, aux, auxD = vrep.simxReadVisionSensor(clientID, hokuyo1,
                                                   vrep.simx_opmode_buffer)
        print(auxD)
        #print(math.atan2(-3,3))
        #print(angle[2])
        """
        #7 straight 
        
        wheelVelocities = vel(-0.5,0,0,r,l1,l2)
        drive(wheelVelocities, 14.0,clientID, wheelJoints)
        #turn 
        wheelVelocities = vel(0,0,1.0,r,l1,l2)
        drive(wheelVelocities, math.pi, clientID,wheelJoints)

        #7 straight 
        
        wheelVelocities = vel(-0.5,0,0,r,l1,l2)
        drive(wheelVelocities, 14, clientID,wheelJoints)
        #turn 
        wheelVelocities = vel(0,0,1.0,r,l1,l2)
        drive(wheelVelocities, math.pi,clientID,wheelJoints)

        #7 straight 
        
        wheelVelocities = vel(-0.5,0,0,r,l1,l2)
        drive(wheelVelocities, 14, clientID,wheelJoints)
        #turn 
        wheelVelocities = vel(0,0,1.0,r,l1,l2)
        drive(wheelVelocities, math.pi, clientID,wheelJoints)

        #7 straight 
        
        wheelVelocities = vel(-0.5,0,0,r,l1,l2)
        drive(wheelVelocities, 14, clientID,wheelJoints)
        #turn 
        wheelVelocities = vel(0,0,1.0,r,l1,l2)
        drive(wheelVelocities, math.pi, clientID,wheelJoints)

        """
        res, base = vrep.simxGetObjectHandle(clientID, 'youBot_center',
                                             vrep.simx_opmode_oneshot_wait)
        base_pos = vrep.simxGetObjectPosition(clientID, base, -1,
                                              vrep.simx_opmode_oneshot_wait)
        base_orient = vrep.simxGetObjectOrientation(
            clientID, base, -1, vrep.simx_opmode_oneshot_wait)
        vrep.simxGetPingTime(clientID)

        # print(base,"\n",base_pos,"\n",base_orient,"\n")

        # set wheel velocity to 0
        for i in range(0, 4):
            vrep.simxSetJointTargetVelocity(clientID, wheelJoints[i], 0,
                                            vrep.simx_opmode_oneshot)

        # Stop simulation:
        vrep.simxStopSimulation(clientID, vrep.simx_opmode_oneshot_wait)

        # Now close the connection to V-REP:
        vrep.simxFinish(clientID)
    else:
        print('Failed connecting to remote API server')
    print('Program ended')
Example #28
    print('Connection not successful')
    sys.exit('Could not connect')

#Getting motor handles
errorCode, left_motor_handle = vrep.simxGetObjectHandle(
    clientID, "left_joint", vrep.simx_opmode_oneshot_wait)
errorCode, right_motor_handle = vrep.simxGetObjectHandle(
    clientID, "right_joint", vrep.simx_opmode_oneshot_wait)
sensor_h = []  #handles list
sensor_val = []  #Sensor value list
#Getting sensor handles list
for x in range(0, 7):
    errorCode, sensor_handle = vrep.simxGetObjectHandle(
        clientID, 'line_sensor' + str(x), vrep.simx_opmode_oneshot_wait)
    sensor_h.append(sensor_handle)  # keep a list of sensor handles
    errorCode, detectionstate, sensorreadingvalue = vrep.simxReadVisionSensor(
        clientID, sensor_h[x], vrep.simx_opmode_streaming)
    sensor_val.append(1.0)  # pre-fill the sensor value list; the main loop overwrites these values
maxkiirus = 15  #Maximal speed
previouserror = 0  #previous error
error = 0  #error
integral = 0  # PID values before while loop
derivative = 0
proportional = 0
time.sleep(1)
P = 0.3
I = 0.0003
D = 0.5
t = time.time()  # record the start time
summ = 0
Example #29
#retrieve motor  handles
errorCode, left_motor_handle = vrep.simxGetObjectHandle(
    clientID, 'Pioneer_p3dx_leftMotor', vrep.simx_opmode_oneshot_wait)
errorCode, right_motor_handle = vrep.simxGetObjectHandle(
    clientID, 'Pioneer_p3dx_rightMotor', vrep.simx_opmode_oneshot_wait)

# Get visionSensorHandle
visionSensorHandleReturnCode, visionSensorHandle = vrep.simxGetObjectHandle(
    clientID, 'cam1', vrep.simx_opmode_oneshot_wait)

pioneerHandleReturnCode, pioneerHandle = vrep.simxGetObjectHandle(
    clientID, 'Pioneer_p3dx', vrep.simx_opmode_oneshot_wait)

# Setup visionSensorValues
visionSensorReturnCode, visionSensorDetectionState, visionSensorAuxPackets = vrep.simxReadVisionSensor(
    clientID, visionSensorHandle, vrep.simx_opmode_streaming)
pioneerVelocityReturnCode, pioneerVelocityLinearVelocity, pioneerVelocityAngularVelocity = vrep.simxGetObjectVelocity(
    clientID, pioneerHandle, vrep.simx_opmode_streaming)

sensor_h = []  #empty list for handles
sensor_val = np.array([])  #empty array for sensor measurements

#orientation of all the sensors:
sensor_loc = np.array([
    -PI / 2, -50 / 180.0 * PI, -30 / 180.0 * PI, -10 / 180.0 * PI,
    10 / 180.0 * PI, 30 / 180.0 * PI, 50 / 180.0 * PI, PI / 2, PI / 2,
    130 / 180.0 * PI, 150 / 180.0 * PI, 170 / 180.0 * PI, -170 / 180.0 * PI,
    -150 / 180.0 * PI, -130 / 180.0 * PI, -PI / 2
])

#for loop to retrieve sensor arrays and initiate sensors
Example #30
def run(x1, x2, x3):  # PID simulation function
    k = 0
    viivitus = 0
    sensor_val[6] = 0
    while (sensor_val[6] < 0.95):
        if k == 0:
            maxkiirus = 15  #Maximal speed
            previouserror = 0  #previous error
            error = 0  #error
            integral = 0  # PID values before while loop
            derivative = 0
            proportional = 0
            time.sleep(1)
            t = time.time()  # record the start time
            summ = 0

        summa = 0
        andur = 0
        for x in range(0, 6):
            errorCode, detectionstate, sensorreadingvalue = vrep.simxReadVisionSensor(
                clientID, sensor_h[x], vrep.simx_opmode_buffer)
            sensor_val[x] = sensorreadingvalue[1][0]  # overwrite the stored sensor value
            andur = andur + convertsensor(
                sensor_val[x])  # accumulate the binary sensor readings
            summa = summa + convertsensor(
                sensor_val[x]) * x * 10.0  # position-weighted sum used to locate the line

        if andur == 0 and previouserror > 0:  # no sensor sees the line and the previous error was positive
            error = maxkiirus  # error is set to the maximal speed
        elif andur == 0 and previouserror < 0:  # no sensor sees the line and the previous error was negative
            error = -maxkiirus  # error is set to minus the maximal speed
        else:
            if andur == 0:  # no sensor sees the line
                positsioon = proportional  # fall back to the previous proportional error
            else:
                positsioon = summa / andur  # otherwise the position is the weighted sum divided by the sensor total
            #print( "Positsioon on :", positsioon)
            proportional = positsioon - 25  # Proportional error
            summ = summ + proportional * proportional

            #print( "viivitus :",viivitus)
            integral = integral + proportional  # Integral error
            if integral > 2500:  # reset the integral term when it grows too large (anti-windup)
                integral = 0
            elif integral < -2500:
                integral = 0
        #   print "Integraal on :", integral
            derivative = proportional - previouserror  # Derivative error - in reality this has no effect.
            #   print "Derivatiivne on :", derivative
            previouserror = proportional  #It is remembering previous error
            error = proportional * x1 + integral * x2 + derivative * x3  #Calculating overall error using PID.

        #  print "viivitus on: ", viivitus
        # Clamp the error to [-maxkiirus, maxkiirus] so the wheels never have to rotate backwards.
        if error > maxkiirus:
            error = maxkiirus
        elif error < -maxkiirus:
            error = -maxkiirus
        if error < 0:  # In here it is calculating the speed of the wheels
            omega_right = maxkiirus + error
            omega_left = maxkiirus
        else:
            omega_right = maxkiirus
            omega_left = maxkiirus - error

        errorCode = vrep.simxSetJointTargetVelocity(clientID,
                                                    left_motor_handle,
                                                    omega_left,
                                                    vrep.simx_opmode_streaming)
        errorCode = vrep.simxSetJointTargetVelocity(clientID,
                                                    right_motor_handle,
                                                    omega_right,
                                                    vrep.simx_opmode_streaming)
        k = k + 1
        errorCode, detectionstate, sensorreadingvalue = vrep.simxReadVisionSensor(
            clientID, sensor_h[6], vrep.simx_opmode_buffer)
        sensor_val[6] = sensorreadingvalue[1][0]
        viivitus = round((time.time() - t), 5)  #calculating delay time
        if viivitus < 3:  # ignore the finish-line sensor during the first 3 seconds
            sensor_val[6] = 0

#   print(" hfhg  ",sensor_val[6])
#   print(" viivitus ",viivitus)
    errorCode = vrep.simxSetJointTargetVelocity(clientID, left_motor_handle, 0,
                                                vrep.simx_opmode_streaming)
    errorCode = vrep.simxSetJointTargetVelocity(clientID, right_motor_handle,
                                                0, vrep.simx_opmode_streaming)
    return summ
Example #31
            vrep.sim_buttonproperty_staydown + vrep.sim_buttonproperty_isdown,
            vrep.simx_opmode_oneshot)
    if middle:
        vrep.simxSetUIButtonProperty(
            clientID, elHandle, 16,
            vrep.sim_buttonproperty_staydown + vrep.sim_buttonproperty_isdown,
            vrep.simx_opmode_oneshot)
    if right:
        vrep.simxSetUIButtonProperty(
            clientID, elHandle, 24,
            vrep.sim_buttonproperty_staydown + vrep.sim_buttonproperty_isdown,
            vrep.simx_opmode_oneshot)


sensorReading = [False, False, False]
sensorReading[0] = (vrep.simxReadVisionSensor(clientID, leftSensor,
                                              vrep.simx_opmode_streaming) == 1)
sensorReading[1] = (vrep.simxReadVisionSensor(clientID, middleSensor,
                                              vrep.simx_opmode_streaming) == 1)
sensorReading[2] = (vrep.simxReadVisionSensor(clientID, rightSensor,
                                              vrep.simx_opmode_streaming) == 1)

while time.time() - startTime < 50:
    returnCode, data = vrep.simxGetIntegerParameter(
        clientID, vrep.sim_intparam_mouse_x,
        vrep.simx_opmode_buffer)  # Try to retrieve the streamed data

    # Read the sensors:

    sensorReading[0] = (vrep.simxReadVisionSensor(clientID, leftSensor,
                                                  vrep.simx_opmode_buffer)[1])
    sensorReading[1] = (vrep.simxReadVisionSensor(clientID, middleSensor,