def verificaConexao(self):
        # retry until the remote API returns a valid connection id
        conectado = sim.simxGetConnectionId(self.id) != -1
        while not conectado:
            sim.simxFinish(-1)  # close any stale connections before retrying
            self.id = sim.simxStart('127.0.0.1', 19999, True, True, 5000,
                                    5)  # Connect to CoppeliaSim
            conectado = sim.simxGetConnectionId(self.id) != -1

        return conectado
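A minimal usage sketch for this helper, assuming a hypothetical wrapper object; only the sim.* calls are from the CoppeliaSim remote API, the class and attribute names are placeholders:

import sim  # CoppeliaSim legacy remote API bindings (sim.py must be importable)

class Robo:  # hypothetical wrapper, for illustration only
    def __init__(self):
        self.id = -1  # no connection yet; verificaConexao will (re)connect

robo = Robo()
if verificaConexao(robo):  # calling the function above with an explicit "self"
    print('Connected, client id:', robo.id)
    sim.simxFinish(robo.id)  # close the connection when finished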
Example #2
def vision():
    while (sim.simxGetConnectionId(clientID) != -1):
        err, resolution, image = sim.simxGetVisionSensorImage(
            clientID, v1, 0, sim.simx_opmode_buffer)
        if err == sim.simx_return_ok:
            img = np.array(image, dtype=np.uint8)
            img.resize([resolution[1], resolution[0], 3])
            # the image arrives upside down, so flip it vertically
            img180 = cv2.flip(img, 0)

            # convert from RGB (sensor) to BGR, which is what cv2.imshow expects
            imgfinal = cv2.cvtColor(img180, cv2.COLOR_RGB2BGR)

            # show image
            cv2.imshow('image', imgfinal)

            # press 'q' to exit
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
        elif err == sim.simx_return_novalue_flag:
            # the streaming buffer has not received data yet
            print('no image')
        else:
            print(err)
    else:
        print("Failed to connect to remote API Server")
        sim.simxFinish(clientID)
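This function reads with simx_opmode_buffer, so the image stream has to be started beforehand. A minimal setup sketch for the globals clientID and v1 used above; the sensor name 'Vision_sensor' is an assumption, use the name from your own scene:

import cv2
import numpy as np
import sim

sim.simxFinish(-1)  # close any stale connections
clientID = sim.simxStart('127.0.0.1', 19999, True, True, 5000, 5)
# 'Vision_sensor' is an assumed object name
err, v1 = sim.simxGetObjectHandle(clientID, 'Vision_sensor',
                                  sim.simx_opmode_oneshot_wait)
# the first streaming call starts the data stream; vision() then reads the buffer
sim.simxGetVisionSensorImage(clientID, v1, 0, sim.simx_opmode_streaming)
vision()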
Example #3
def main():
    print('### Program started')

    print('### Number of arguments:', len(sys.argv), 'arguments.')
    print('### Argument List:', str(sys.argv))

    sim.simxFinish(-1)  # just in case, close all opened connections

    port = int(sys.argv[1])
    clientID = sim.simxStart('127.0.0.1', port, True, True, 2000, 5)

    if clientID == -1:
        print('### Failed connecting to remote API server')

    else:
        print('### Connected to remote API server')
        hRobot = getRobotHandles(clientID)

        while sim.simxGetConnectionId(clientID) != -1:
            # Perception
            sonar = getSonar(clientID, hRobot)
            #print ('### s', sonar)

            blobs, coord = getImageBlob(clientID, hRobot)

            nspeed = 1.25

            if blobs == 1:
                if coord[0] > 0.5:
                    pd = abs(0.5 - coord[0]) / 0.5
                    pi = 0
                else:
                    pi = (0.5 - coord[0]) / 0.5
                    pd = 0

                if coord[1] >= 0.6:
                    res = 0.5
                else:
                    res = 0

                print('pd= ', pd, 'pi= ', pi, 'Y= ', coord[1])
                lspeed = nspeed + (1.5 * pd) - res
                rspeed = nspeed + (1.5 * pi) - res

            else:
                lspeed, rspeed = avoid(sonar)
                #lspeed, rspeed = +1.5,+0

            # Planning
            #lspeed, rspeed = avoid(sonar)

            # Action
            setSpeed(clientID, hRobot, lspeed, rspeed)
            time.sleep(0.1)

        print('### Finishing...')
        sim.simxFinish(clientID)

    print('### Program ended')
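The helpers used above (getRobotHandles, getSonar, getImageBlob, setSpeed, avoid) are not shown in this example. Below is a rough sketch of the two simplest ones under the assumption of a Pioneer P3DX-style scene; all object names and the hRobot layout are assumptions, only the sim.* calls are from the remote API:

import sim

def getRobotHandles(clientID):
    # assumed Pioneer P3DX object names; adjust them to your own scene
    _, lm = sim.simxGetObjectHandle(clientID, 'Pioneer_p3dx_leftMotor',
                                    sim.simx_opmode_blocking)
    _, rm = sim.simxGetObjectHandle(clientID, 'Pioneer_p3dx_rightMotor',
                                    sim.simx_opmode_blocking)
    sonars = []
    for i in range(1, 17):
        _, s = sim.simxGetObjectHandle(clientID,
                                       'Pioneer_p3dx_ultrasonicSensor' + str(i),
                                       sim.simx_opmode_blocking)
        # start streaming so the control loop can read with simx_opmode_buffer
        sim.simxReadProximitySensor(clientID, s, sim.simx_opmode_streaming)
        sonars.append(s)
    return {'left': lm, 'right': rm, 'sonars': sonars}

def setSpeed(clientID, hRobot, lspeed, rspeed):
    sim.simxSetJointTargetVelocity(clientID, hRobot['left'], lspeed,
                                   sim.simx_opmode_oneshot)
    sim.simxSetJointTargetVelocity(clientID, hRobot['right'], rspeed,
                                   sim.simx_opmode_oneshot)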
Example #4
def streamVisionSensor():

    # Mouse callback: remember the clicked point so it can be tracked
    def select_point(event, x, y, flags, params):
        # nonlocal (not global) so the enclosing function's variables are updated
        nonlocal point, point_selected, old_points
        if event == cv2.EVENT_LBUTTONDOWN:
            # map the click from the 480x480 display window back to sensor coordinates
            point = (x * resolution[0] / 480.0, y * resolution[1] / 480.0)
            point_selected = True
            old_points = np.array([point], dtype=np.float32)
            print("Tracking point selected:", point)

    cv2.namedWindow('frame')

    point_selected = False
    point = ()
    old_points = np.array([[]])
    #Get the handle of vision sensor
    errorCode, visionSensorHandle = sim.simxGetObjectHandle(
        clientID, 'Vision_sensor', sim.simx_opmode_oneshot_wait)
    #Get the image
    errorCode, resolution, image = sim.simxGetVisionSensorImage(
        clientID, visionSensorHandle, 0, sim.simx_opmode_streaming)
    time.sleep(0.5)

    errorCode, resolution, image = sim.simxGetVisionSensorImage(
        clientID, visionSensorHandle, 0, sim.simx_opmode_buffer)
    sensorImage = np.array(image, dtype=np.uint8)
    sensorImage.resize([resolution[1], resolution[0], 3])
    old_image = sensorImage.copy()

    cv2.setMouseCallback('frame', select_point)

    while (sim.simxGetConnectionId(clientID) != -1):
        #Get the image of the vision sensor
        errorCode, resolution, image = sim.simxGetVisionSensorImage(
            clientID, visionSensorHandle, 0, sim.simx_opmode_buffer)
        #Transform the image so it can be displayed
        sensorImage = np.array(image, dtype=np.uint8)
        sensorImage.resize([resolution[1], resolution[0], 3])

        if point_selected is True:
            # track the selected point with Lucas-Kanade optical flow
            new_points, status, error = cv2.calcOpticalFlowPyrLK(
                old_image, sensorImage, old_points, None, **lk_params)
            old_image = sensorImage.copy()  # current frame becomes previous
            old_points = new_points  # current x,y points become previous
            x, y = new_points.ravel()
            cv2.circle(sensorImage, (int(x), int(y)), 8, (0, 0, 0), -1)

        # resize after drawing so the tracked point is visible in the window
        displayedImage = cv2.resize(sensorImage, (480, 480))
        cv2.imshow('frame', displayedImage)
        if cv2.waitKey(30) & 0xFF == ord('q'):
            break
    cv2.destroyAllWindows()
    print('End of Simulation')
Example #5
def collision():
    errorCode, car_collision_handle = sim.simxGetCollisionHandle(
        clientID, 'Collision', sim.simx_opmode_oneshot_wait)
    if errorCode != sim.simx_return_ok:
        print('Error: ', errorCode)
    number_collisions = 0
    was_colliding = False
    # start the stream once, then read from the buffer inside the loop
    sim.simxReadCollision(clientID, car_collision_handle,
                          sim.simx_opmode_streaming)
    while (sim.simxGetConnectionId(clientID) != -1):
        returnCode, collisionState = sim.simxReadCollision(
            clientID, car_collision_handle, sim.simx_opmode_buffer)
        # count a collision only on the transition into the colliding state
        if returnCode == sim.simx_return_ok and collisionState and not was_colliding:
            number_collisions += 1
            print("Collision {0} occurred".format(number_collisions))
        was_colliding = bool(collisionState)
Example #6
 def update_status(self):
     """
     check the status of the simulator
     """
     self.status_has_changed = False
     self.is_online = False
     if sim.simxGetConnectionId(
             self.clientID) != -1 and self.clientID != -1:
         success, self.status = sim.simxGetIntegerSignal(
             self.clientID, 'Evo-Client Status', mode4)
         self.is_online = True
         if self.status != self.__old_status: self.status_has_changed = True
         self.__old_status = self.status
         # -1 = not ready/error, 0 = ready, 1 = active, 2 = done
     return self.status
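A hedged usage sketch for this polling API, assuming a wrapper object exposing update_status() and is_online as above; everything except the documented status codes is an assumption:

import time

def wait_until_ready(client, poll_interval=0.1):
    # poll the 'Evo-Client Status' signal until it reports ready (0);
    # status codes as documented above: -1 not ready/error, 0 ready, 1 active, 2 done
    while client.update_status() != 0:
        if not client.is_online:
            print('simulator offline, waiting...')
        time.sleep(poll_interval)
    print('simulator ready')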
Example #7
def image_processor(clientID):
    pub = rospy.Publisher('offset', Float64, queue_size=10)
    rospy.init_node('image_processor_node')
    rate = rospy.Rate(10)
    res, handle_cam = sim.simxGetObjectHandle(clientID, "cam",
                                              sim.simx_opmode_blocking)
    while not rospy.is_shutdown():
        err, resolution, frame = sim.simxGetVisionSensorImage(
            clientID, handle_cam, 0, sim.simx_opmode_streaming)
        if (sim.simxGetConnectionId(clientID) != -1):
            err, resolution, frame = sim.simxGetVisionSensorImage(
                clientID, handle_cam, 0, sim.simx_opmode_buffer)
            # reversing the flat pixel list rotates the image 180 degrees and swaps the channel order (RGB -> BGR)
            frame = frame[::-1]
            if err == sim.simx_return_ok:
                img = np.array(frame, dtype=np.uint8)
                img.resize([resolution[0], resolution[1], 3])

                # detect red cube
                hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)

                l_b = np.array([0, 100, 100])
                u_b = np.array([10, 255, 255])

                mask = cv2.inRange(hsv, l_b, u_b)

                contours, _ = cv2.findContours(mask, cv2.RETR_TREE,
                                               cv2.CHAIN_APPROX_NONE)
                if len(contours) != 0:
                    (x, y, w, h) = cv2.boundingRect(contours[0])
                    # horizontal center of the detected object
                    x_medium = (2.0 * x + w) / 2.0
                    # offset between the object's center and the frame center
                    object_center = resolution[0] / 2.0 - x_medium
                    pub.publish(object_center)
                    #rospy.loginfo(res)

                    #cv2.line(img, (int(x_medium), 0), (int(x_medium), resolution[0]), (0, 0, 255), 1)
                else:
                    pub.publish(0.0)
                rate.sleep()

                #cv2.imshow('image', img)
                #if cv2.waitKey(1) & 0xFF == ord('q'):
                #break
        else:
            print("Failed to connect to the remote API server...")
            sim.simxFinish(clientID)
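The mask above only covers hues 0-10; in OpenCV's HSV space, red also wraps around near hue 180, so some red pixels can be missed. A small sketch of the common two-range variant; the threshold values are assumptions to tune:

import cv2
import numpy as np

def red_mask(bgr_img):
    # red sits at both ends of the OpenCV hue range (0-180), so combine two bands
    hsv = cv2.cvtColor(bgr_img, cv2.COLOR_BGR2HSV)
    mask_low = cv2.inRange(hsv, np.array([0, 100, 100]), np.array([10, 255, 255]))
    mask_high = cv2.inRange(hsv, np.array([170, 100, 100]), np.array([180, 255, 255]))
    return cv2.bitwise_or(mask_low, mask_high)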
Example #8
 def recover(self):
     """
     attempt to restore connection to a lost client
     """
     if self.clientID != -1:
         # this check is necessary,
         # otherwise a client that was not online when created (ID == -1) would lead to simxFinish(-1),
         # which kills ALL connections
         sim.simxFinish(self.clientID)
     new_clientID = sim.simxStart(self.ip, self.port, False, True,
                                  client_connection_timeout,
                                  client_comm_cycle_time)
     # sim.simxStart(connectionAddress, connectionPort, waitUntilConnected, doNotReconnectOnceDisconnected,
     # timeOutInMs, commThreadCycleInMs)
     # check the NEW client id; the old connection was just closed above
     if new_clientID != -1 and sim.simxGetConnectionId(new_clientID) != -1:
         self.clientID = new_clientID
         self.health = self.health_initial
         self.is_online = True
         success = True
     else:
         success = False
     return success
Example #9
 # get vision sensor objects
 res, v0 = sim.simxGetObjectHandle(clientID, 'vs1',
                                   sim.simx_opmode_oneshot_wait)
 res, v1 = sim.simxGetObjectHandle(clientID, 'vs2',
                                   sim.simx_opmode_oneshot_wait)
 err, resolution, image = sim.simxGetVisionSensorImage(
     clientID, v0, 0, sim.simx_opmode_streaming)
 #get motor objects
 err, Ball_handle = sim.simxGetObjectHandle(
     clientID, 'Ball', sim.simx_opmode_oneshot_wait)
 err, player_x_handle = sim.simxGetObjectHandle(
     clientID, 'player_x_joint', sim.simx_opmode_oneshot_wait)
 err, player_y_handle = sim.simxGetObjectHandle(
     clientID, 'player_y_joint', sim.simx_opmode_oneshot_wait)
 #main loop
 while (sim.simxGetConnectionId(clientID) != -1):
     #get vision sensor image
     start = time.time()
     err, resolution, image = sim.simxGetVisionSensorImage(
         clientID, v0, 0, sim.simx_opmode_buffer)
     #print(len(image))
     if err == sim.simx_return_ok:
         image_byte_array = array.array('b', image)
         image_buffer = I.frombuffer("RGB",
                                     (resolution[0], resolution[1]),
                                     bytes(image_byte_array), "raw",
                                     "RGB", 0, 1)
         img2 = numpy.asarray(image_buffer)
         #ColorRecognition
         if video_tranfer == 1:
             ret_yellow = color.track_yellow_object(img2)
Example #10
    res = vrep.simxSetObjectOrientation(clientID, handle, -1, ori0,
                                        vrep.simx_opmode_oneshot_wait)


if (clientID != -1):
    print('Connected to remote API server')
    res, v0 = vrep.simxGetObjectHandle(clientID, 'Chessboard',
                                       vrep.simx_opmode_oneshot_wait)
    # res, vp = vrep.simxGetObjectParent(clientID, v0, vrep.simx_opmode_oneshot_wait)
    res, pos0 = vrep.simxGetObjectPosition(clientID, v0, -1,
                                           vrep.simx_opmode_oneshot_wait)
    res, ori0 = vrep.simxGetObjectOrientation(clientID, v0, -1,
                                              vrep.simx_opmode_oneshot_wait)
    print(pos0, ori0)
    while (vrep.simxGetConnectionId(clientID) != -1):
        #############################original#############################
        res = vrep.simxSetObjectPosition(clientID, v0, -1, pos0,
                                         vrep.simx_opmode_oneshot_wait)
        res = vrep.simxSetObjectOrientation(clientID, v0, -1, ori0,
                                            vrep.simx_opmode_oneshot_wait)
        time.sleep(t)
        rotation_combinations(v0)

        #############################layer 02#############################
        res = vrep.simxSetObjectPosition(clientID, v0, v0, (0, 0, -0.2),
                                         vrep.simx_opmode_oneshot_wait)
        #   res = vrep.simxSetObjectOrientation(clientID, v0, -1, ori0, vrep.simx_opmode_oneshot_wait)
        time.sleep(t)
        rotation_combinations(v0)
Example #11
    def track_motion(self):
        res, resolution, frame1 = sim.simxGetVisionSensorImage(
            self.clientID, self.handle_cam, 0, sim.simx_opmode_streaming)

        # wait until frame is read correctly
        while res != sim.simx_return_ok:
            res, resolution, frame1 = sim.simxGetVisionSensorImage(
                self.clientID, self.handle_cam, 0, sim.simx_opmode_buffer)
        res, resolution, frame2 = sim.simxGetVisionSensorImage(
            self.clientID, self.handle_cam, 0, sim.simx_opmode_buffer)

        # reversing the flat pixel list rotates the image 180 degrees and swaps the channel order (RGB <-> BGR)
        frame1 = frame1[::-1]
        frame1 = np.array(frame1, dtype=np.uint8)
        frame1.resize([resolution[0], resolution[1], 3])

        frame2 = frame2[::-1]
        frame2 = np.array(frame2, dtype=np.uint8)
        frame2.resize([resolution[0], resolution[1], 3])

        # Process the video and detect motion by subtracting subsequent frames
        while not rospy.is_shutdown():
            if sim.simxGetConnectionId(self.clientID) != -1:
                diff = cv2.absdiff(frame1, frame2)
                gray = cv2.cvtColor(diff, cv2.COLOR_BGR2GRAY)
                blur = cv2.GaussianBlur(gray, (5, 5), 0)
                _, thresh = cv2.threshold(blur, 20, 255, cv2.THRESH_BINARY)
                dilated = cv2.dilate(thresh, None, iterations=3)
                contours, _ = cv2.findContours(dilated, cv2.RETR_TREE,
                                               cv2.CHAIN_APPROX_SIMPLE)

                counter = 0
                centerX = 0

                for contour in contours:
                    (x, y, w, h) = cv2.boundingRect(contour)
                    # skip contours that are wider than tall (a person is taller
                    # than wide) or whose area is too small to be a person
                    if w >= h or cv2.contourArea(contour) < 1000:
                        continue
                    counter = counter + 1
                    centerX = centerX + (2.0 * x + w) / 2.0
                    #cv2.rectangle(frame1, (x, y), (x+w, y+h), (0, 255, 0), 1)

                if counter != 0:
                    centerX = centerX / counter
                    self.offset = (resolution[0] / 2.0) - centerX
                else:
                    self.offset = 0.0

                # publish the offset from the center of the frame
                self.pub.publish(self.offset)

                cv2.imshow("camera", frame1)

                # update variables
                frame1 = frame2
                res, resolution, frame2 = sim.simxGetVisionSensorImage(
                    self.clientID, self.handle_cam, 0, sim.simx_opmode_buffer)

                frame2 = frame2[::-1]
                frame2 = np.array(frame2, dtype=np.uint8)
                frame2.resize([resolution[0], resolution[1], 3])

                if cv2.waitKey(40) == 27:
                    break
            else:
                print("Failed to connect to the remote API server...")
                sim.simxFinish(self.clientID)
Example #12
    res_gnd, resolution_gnd, image_gnd = sim.simxGetVisionSensorImage(
        clientID_gnd, camera_gnd, 0, sim.simx_opmode_streaming)

    # Flags that allow each of the steps to execute only when necessary
    isDroneCentered = False
    isMapReady = False
    isMovingToManta = False
    isLookingForBear = False
    isRescueDone = False
    isReadyToSearch = False
    isGoingBackToManta = False
    isReturningMapReady = False
    isWaitingForMap = False
    isGoingBackToHospital = False

    while (sim.simxGetConnectionId(clientID_drone) != -1) and \
          (sim.simxGetConnectionId(clientID_gnd) != -1):

        # -------------------------------------- DRONE MOVES TO THE CENTRE OF THE SCENE --------------------------------------

        # Moving drone to the center of the environment
        if (not isDroneCentered):
            drone_viewposition, repeatseed, isDroneCentered = fun.droneInitialMovement(
                clientID_drone, drone_base_handle, drone_target_handle, floor,
                drone_viewposition, repeatseed)

        # If we don't have a path
        if (not isMapReady) and isDroneCentered:
            # Give time for the drone to stabilize
            time.sleep(40)
            # Repeat until getting the instructions (commands) list
Example #13
def main():
    print('Program started')

    sim.simxFinish(-1)  # just in case, close all opened connections
    clientID = sim.simxStart(COPPELIA_SIM_IP_ADDRESS, COPPELIA_SIM_PORT, True,
                             True, 2000, 5)  # Connect to CoppeliaSim

    # lower and upper are the bounds for detecting the color red; each element is one of the RGB channels
    # (in reverse order, BGR, because of OpenCV). So the lower detection limit is 0 for blue, 0 for green
    # and 100 for red; the same reasoning applies to the upper vector.
    lower = np.uint8([0, 0, 100])
    upper = np.uint8([60, 60, 255])

    # there are 8 targets, so to get their references (handles) we need to iterate over them; that is what targetIterator is for.
    targetIterator = 1

    if (clientID != -1):

        print('Connection Successful')

        # get the handles of all the elements to be manipulated.
        # note that only the first target handle is fetched here, because we fetch one target at a time.
        err1, ddRobotHandle = sim.simxGetObjectHandle(
            clientID, 'RobotFrame#', sim.simx_opmode_oneshot_wait)
        err2, leftMotorHandle = sim.simxGetObjectHandle(
            clientID, 'LeftMotor#', sim.simx_opmode_oneshot_wait)
        err3, rightMotorHandle = sim.simxGetObjectHandle(
            clientID, 'RightMotor#', sim.simx_opmode_oneshot_wait)
        err4, revoluteJointHandle = sim.simxGetObjectHandle(
            clientID, 'RevoluteJoint#', sim.simx_opmode_oneshot_wait)
        err5, visionSensorHandle = sim.simxGetObjectHandle(
            clientID, 'VisionSensor#', sim.simx_opmode_oneshot_wait)
        err6, targetHandle = sim.simxGetObjectHandle(
            clientID, 'Target' + str(targetIterator) + '#',
            sim.simx_opmode_oneshot_wait)
        targetIterator += 1

        # get the handles of the ultrasonic sensors (7 sensors are used).
        # note: ultrasonic sensors != vision sensor
        usensors = []
        for i in range(1, 8):
            err, usensor = sim.simxGetObjectHandle(
                clientID, 'Proximity_sensor' + str(i) + '#',
                sim.simx_opmode_oneshot_wait)
            usensors.append(usensor)

        # reading the sensors must not block the robot's main loop (the while further below), so CoppeliaSim
        # offers the streaming opmode: once a read is started in streaming mode, the server keeps measuring and
        # placing the results in a buffer dedicated to that handle (7 handles, 7 buffers). On each iteration we
        # then only read from the buffer, without asking the simulator for a new measurement, which saves time
        # and keeps the robot's loop non-blocking. (A condensed sketch of this pattern follows this example.)
        for i in range(7):
            x, a, b, c, d = sim.simxReadProximitySensor(
                clientID, usensors[i], sim.simx_opmode_streaming)

        # the same rule described above for the ultrasonic sensors applies to the vision sensor (and to any other sensor that might be used).
        a, b, image = sim.simxGetVisionSensorImage(clientID,
                                                   visionSensorHandle, 0,
                                                   sim.simx_opmode_streaming)

        print(f'RobotFrame: {ddRobotHandle}')
        print(f'LeftMotor: {leftMotorHandle}')
        print(f'RightMotor: {rightMotorHandle}')
        print(f'RevoluteJoint: {revoluteJointHandle}')
        print(f'VisionSensor: {visionSensorHandle}')

        ret = sim.simxStartSimulation(clientID, sim.simx_opmode_oneshot_wait)
        if (ret == -1):
            print('Failed to start the simulation.')
            exit(-1)
        print('Simulation Started')

        while (sim.simxGetConnectionId(clientID) != -1):
            # current position of the robot
            pos = getPosition(clientID, ddRobotHandle)
            #print(f'posX: {pos[0]:.3f} posY: {pos[1]:.3f}', end=' ')
            # current target of the robot
            goal = getPosition(clientID, targetHandle)
            #print(f'goalX: {goal[0]:.3f} goalY: {goal[1]:.3f}', end=' ')

            # note the simx_opmode_buffer: we do not request a new vision sensor reading, we simply take the latest one from the buffer.
            a, b, image = sim.simxGetVisionSensorImage(clientID,
                                                       visionSensorHandle, 0,
                                                       sim.simx_opmode_buffer)
            # operations to arrange the vision sensor image.
            newImage = np.uint8(image).reshape((b[0], b[0], 3))
            # definitiveImage is self-explanatory, but note that it is BGR-encoded.
            definitiveImage = cv2.cvtColor(np.flip(newImage, 0),
                                           cv2.COLOR_RGB2BGR)
            # mask is simply another image with the same resolution as definitiveImage, but showing only red objects,
            # i.e. value 255 at pixels that are red in the original image and 0 at every other pixel.
            mask = cv2.inRange(definitiveImage, lower, upper)

            # distance from the robot to the target.
            dx = goal[0] - pos[0]
            dy = goal[1] - pos[1]
            euclid = math.sqrt(dx * dx + dy * dy)
            # print(euclid)

            # if the target is close enough:
            if (euclid <= 0.23):
                # if the mask contains only non-zero pixels, i.e. only red pixels:
                if (0.0 not in mask):
                    # stop the robot and spin the propeller counterclockwise for about three seconds.
                    phiL, phiR, phiH = 0, 0, 2
                    setTargetSpeed(clientID, phiL, phiR, phiH, leftMotorHandle,
                                   rightMotorHandle, revoluteJointHandle)
                    time.sleep(3)
                    setTargetSpeed(clientID, phiL, phiR, 0, leftMotorHandle,
                                   rightMotorHandle, revoluteJointHandle)
                # otherwise, the target is blue.
                else:
                    # so stop the robot and spin the propeller clockwise for about three seconds.
                    phiL, phiR, phiH = 0, 0, -2
                    setTargetSpeed(clientID, phiL, phiR, phiH, leftMotorHandle,
                                   rightMotorHandle, revoluteJointHandle)
                    time.sleep(3)
                    setTargetSpeed(clientID, phiL, phiR, 0, leftMotorHandle,
                                   rightMotorHandle, revoluteJointHandle)

                # if there are still targets left
                if (targetIterator <= 8):
                    # get the handle of the next target.
                    err6, targetHandle = sim.simxGetObjectHandle(
                        clientID, 'Target' + str(targetIterator) + '#',
                        sim.simx_opmode_oneshot_wait)
                    targetIterator += 1
                # otherwise end the program and pause the simulation.
                else:
                    break
            # if the target is still far away, keep driving toward it with the propeller stopped.
            else:
                phiL, phiR = movementControl(pos, goal, clientID, usensors)
                phiH = 0
                setTargetSpeed(clientID, phiL, phiR, phiH, leftMotorHandle,
                               rightMotorHandle, revoluteJointHandle)

        # uncomment these lines if you want to save the captured images.
        # imageIterator = 0
        #cv2.imwrite('capture' + str(imageIterator) + '.png', definitiveImage)
        #imageIterator += 1

        setTargetSpeed(clientID, phiL, phiR, phiH, leftMotorHandle,
                       rightMotorHandle, revoluteJointHandle)
        sim.simxPauseSimulation(clientID, sim.simx_opmode_oneshot_wait)
        sim.simxFinish(clientID)

    else:
        print('Connection failed.')
        exit(-2)
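The streaming/buffer opmode pattern explained in the comments of this example can be summarized in a stand-alone sketch; the helper names are mine, only the sim.* calls are from the remote API:

import sim

def start_sonar_stream(clientID, sensor_handles):
    for h in sensor_handles:
        # first call in streaming mode: the server starts measuring continuously
        sim.simxReadProximitySensor(clientID, h, sim.simx_opmode_streaming)

def read_sonar_buffered(clientID, sensor_handles):
    readings = []
    for h in sensor_handles:
        # later calls just fetch the latest value from the client-side buffer (non-blocking)
        code, detected, point, _, _ = sim.simxReadProximitySensor(
            clientID, h, sim.simx_opmode_buffer)
        readings.append((detected, point))
    return readings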
Example #14
 def check_server_connection(self):
     return sim.simxGetConnectionId(self.clientId) != -1