Example #1
def display_train():
    pygame.init()
    # fenetre = pygame.display.set_mode((width, height), pygame.FULLSCREEN)
    fenetre = pygame.display.set_mode((width, height), pygame.RESIZABLE)
    fenetre.fill(BLACK)

    delay = st.get_system_refresh_period()

    th = mv.save_data(True)

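    # One training run per stimulation frequency, 15 blocks each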
    for fr in range(0, len(st.frequencies)):
        for block in range(0, 15):
            current_time = pygame.time.get_ticks()

            switch_time = current_time + switch_delay

            current_display_time = pygame.time.get_ticks()
            display_time = current_display_time + delay

            while current_time <= switch_time:
                stop()

                current_time = pygame.time.get_ticks()
                current_display_time = pygame.time.get_ticks()

                if current_display_time >= display_time:
                    display_time = current_display_time + delay
                    st.train_switch(fr, fenetre)

            current_time = pygame.time.get_ticks()
            gaze_time = current_time + train_delay

            current_display_time = pygame.time.get_ticks()
            display_time = current_display_time + delay
            j = 0
            while current_time <= gaze_time:
                mv.save_training = True
                mv.fileName = str(fr) + '_' + str(block)
                mv.label = fr
                stop()

                current_time = pygame.time.get_ticks()
                current_display_time = pygame.time.get_ticks()

                if current_display_time >= display_time:
                    display_time = current_display_time + delay
                    st.draw_stimuli(j, fenetre)
                    j += 1
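            # Busy-wait until the saving thread started by mv.save_data() sets mv.saved for this block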
            while not mv.saved:
                continue
            mv.saved = False
            mv.save_training = False

    mv.stop(th)
Example #2
def main():
    pygame.init()
    # fenetre = pygame.display.set_mode((width, height), pygame.FULLSCREEN)
    fenetre = pygame.display.set_mode((width, height), pygame.RESIZABLE)
    fenetre.fill(BLACK)
    x, y = get_start()
    player_position = (x + 32, y + 32)
    end_rect, player = maze.build_game(player_position, x, y)
    maze.draw(fenetre, end_rect, player)
    # time in milliseconds since program start
    current_time = pygame.time.get_ticks()

    # how long to show or hide each stimulus frame

    delay = st.get_system_refresh_period()
    move_delay = 7000

    # time of next change
    change_time = current_time + delay
    j = 0
    th = mv.save_data()
    maze.draw(fenetre, end_rect, player)
    win = False

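    # Main loop: handle quit events, update the maze, and redraw the flickering stimulus once per refresh period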
    while not win:
        for event in pygame.event.get():
            if event.type == pygame.QUIT or (event.type == pygame.KEYDOWN
                                             and event.key == pygame.K_ESCAPE):
                pygame.quit()
                sys.exit()

        win = maze.update(fenetre, end_rect)
        current_time = pygame.time.get_ticks()
        if current_time >= change_time:
            change_time = current_time + delay
            st.draw_stimuli(j, fenetre)
            j += 1
        if win:
            mv.stop(th)
            win_message(fenetre)
Example #3
def handler(signum, frame):
    move.stop()
    sys.exit(0)
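A minimal sketch of how this handler is typically registered, assuming the same move module used in the other examples (the standard-library signal module does the wiring):

import signal
import sys

import move  # motor-control module shared by these examples (assumed)


def handler(signum, frame):
    # Stop the motors before exiting on Ctrl+C
    move.stop()
    sys.exit(0)


signal.signal(signal.SIGINT, handler)
signal.pause()  # block until a signal arrives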
Example #4
        cv2.putText(crop_img, str(ang), (10, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2)
        cv2.putText(crop_img, str(error), (10, 320), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 0, 0), 2)
        cv2.line(crop_img, (int(x_min), int(y_min)), (halfway, int(y_min)), (255, 0, 0), 3)
        Rad_Angle = math.radians(ang)
        error_angle = math.radians(error / 4)
        move.rotate(dxl_io, 1000, dt * Rad_Angle)
        Correction(dxl_io, dt * error_angle)
        time.sleep(dt)
        print(error, ang)
    else:
        print("I don't see a line")
        Look_for_line(dxl_io)
        time.sleep(dt)
    #Display the resulting frame
    #cv2.imshow('frame',crop_img)
    if cv2.waitKey(20) & 0xFF == ord('k'):
        move.stop()
        break

    if cv2.waitKey(1) & 0xFF == ord('q'):
        break

    # Time step
    t += dt
Example #5
def main():
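    # PID controller on the measured bearing (Kp=1, Ki=0.1, Kd=0.05), aiming to hold it at 0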
    pid = PID(1, 0.1, 0.05, setpoint=0)
    rightSpeed = 0
    leftSpeed = 0
    direction = "r"
    track = "simple"
    if len(sys.argv) >= 2 and str(sys.argv[1]) == "left":
        direction = "l"
    if len(sys.argv) >= 3 and str(sys.argv[2]) == "hard":
        track = "hard"

    stream = io.BytesIO()
    if DEMO:
        # Create a window
        cv2.namedWindow(WINDOW_DISPLAY_IMAGE)
        # position the window
        cv2.moveWindow(WINDOW_DISPLAY_IMAGE, 0, 35)

    # Open connection to camera
    with picamera.PiCamera() as camera:
        # Set camera resolution
        camera.resolution = (RESOLUTION_X, RESOLUTION_Y)
        # Start loop
        while True:
            mod = 0
            numpy_horizontal = None
            # Get the tick count so we can keep track of performance
            e1 = cv2.getTickCount()
            # Capture image from camera
            camera.capture(stream, format='jpeg', use_video_port=True)
            # Convert image from camera to a numpy array
            data = np.frombuffer(stream.getvalue(), dtype=np.uint8)
            # Decode the numpy array image
            image = cv2.imdecode(data, cv2.IMREAD_COLOR)
            # Empty and return the in-memory stream to beginning
            stream.seek(0)
            stream.truncate(0)
            # Create other images
            grey_image = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
            display_image = cv2.copyMakeBorder(image, 0, 0, 0, 0,
                                               cv2.BORDER_REPLICATE)
            center_point = (SCAN_POS_X, SCAN_HEIGHT)
            if (track == "hard"):
                ret, thresh = cv2.threshold(grey_image, 127, 255, 0)
                thresh = thresh[100:240, 0:320]
                contours_right, hierarchy = cv2.findContours(
                    thresh[0:140, 170:320], cv2.RETR_CCOMP,
                    cv2.CHAIN_APPROX_SIMPLE)
                contours_left, hierarchy = cv2.findContours(
                    thresh[0:140, 0:150], cv2.RETR_CCOMP,
                    cv2.CHAIN_APPROX_SIMPLE)

                # If an intersection is detected, pull some evasive maneuvers
                if len(contours_right) >= 1 and len(contours_left) >= 1:
                    contour_right = max(contours_right, key=cv2.contourArea)
                    contour_left = max(contours_left, key=cv2.contourArea)
                    extent_right = cv2.contourArea(contour_right)
                    extent_left = cv2.contourArea(contour_left)
                    print(extent_left, extent_right)
                    if extent_left >= 21000 / 5 and extent_right >= 21000 / 5:
                        print(
                            "More than 2 large contours, most likely an intersection, EVASIVE MANOUVERS !"
                        )
                        cv2.rectangle(display_image, (0, 0), (160, 240),
                                      (0, 0, 0), -1)
                        cv2.rectangle(display_image, (0, 0), (320, 80),
                                      (0, 0, 0), -1)
                        cv2.rectangle(grey_image, (0, 0), (160, 240),
                                      (0, 0, 0), -1)
                        cv2.rectangle(grey_image, (0, 0), (320, 80), (0, 0, 0),
                                      -1)
                        mod = -1
                    if DEMO:
                        croppedImg = image.copy()
                        croppedImg_right = croppedImg[100:240, 170:320]
                        croppedImg_left = croppedImg[100:240, 0:150]
                        cv2.drawContours(croppedImg_right, contours_right, 0,
                                         (0, 0, 255), 2)
                        cv2.drawContours(croppedImg_left, contours_left, 0,
                                         (0, 255, 0), 2)
                        numpy_horizontal = np.hstack(
                            (croppedImg_left, croppedImg_right))
            else:
                numpy_horizontal = None
            # Scan a horizontal line based on the centre point
            # We could just use this data to work out how far off centre we are and steer accordingly.
            # Get a data array of all the values along that line
            # scan_data is an array containing:
            #   - pixel value
            scan_data = scanLine(grey_image, display_image, center_point,
                                 SCAN_RADIUS)
            # The center point we believe the line we are following intersects with our scan line.
            point_on_line = findLine(display_image, scan_data, SCAN_POS_X,
                                     SCAN_HEIGHT, SCAN_RADIUS)
            # Start scanning the arcs
            # This allows us to look further ahead at the line and work out an angle to steer
            # From the intersection point of the line, scan in an arc to find the line
            # The scan data contains an array
            #   - pixel value
            #   - x co-ordinate
            #   - y co-ordinate
            returnVal, scan_data = scanCircle(grey_image, display_image,
                                              point_on_line, SCAN_RADIUS_REG,
                                              -90)
            previous_point = point_on_line
            # In the same way as findLine, go through the data, find the mid point and return the co-ordinates.
            last_point = findInCircle(display_image, scan_data)
            cv2.line(display_image, (previous_point[0], previous_point[1]),
                     (last_point[0], last_point[1]), (255, 255, 255), 1)

            actual_number_of_circles = 0
            for scan_count in range(0, NUMBER_OF_CIRCLES + mod):
                returnVal, scan_data = scanCircle(
                    grey_image, display_image, last_point, SCAN_RADIUS_REG,
                    lineAngle(previous_point, last_point))

                # Only work out the next iteration if our point is within the bounds of the image
                if returnVal:
                    actual_number_of_circles += 1
                    previous_point = last_point
                    last_point = findInCircle(display_image, scan_data)
                    cv2.line(display_image,
                             (previous_point[0], previous_point[1]),
                             (last_point[0], last_point[1]), (255, 255, 255),
                             1)
                else:
                    break
            # Draw a line from the centre point to the end point where we last found the line we are following
            cv2.line(display_image, (center_point[0], center_point[1]),
                     (last_point[0], last_point[1]), (0, 0, 255), 1)

            # Display the image
            if DEMO:
                cv2.imshow(WINDOW_DISPLAY_IMAGE, display_image)
                if numpy_horizontal is not None:
                    cv2.imshow('Contours', numpy_horizontal)

            # This is the maximum distance the end point of our search for a line can be from the centre point.
            line_scan_length = SCAN_RADIUS_REG * (actual_number_of_circles + 1)
            # This is the measured line length from the centre point
            line_length_from_center = lineLength(center_point, last_point)
            center_y_distance = center_point[1] - last_point[1]
            center_x_distance = center_point[0] - last_point[0]

            # Stop counting; all the work is done at this point, so calculate how we are doing.
            e2 = cv2.getTickCount()
            bearing = lineAngle(center_point, last_point) * -1 - 90
            returnString = "fps {} - bearing {} - x:{} y:{} look distance:{} distance from origin:{}".format(
                1000 / ((e2 - e1) / cv2.getTickFrequency() * 1000), bearing,
                center_x_distance, center_y_distance, line_scan_length,
                line_length_from_center)
            print(returnString)

            if MOVE:
                if mod == -1 and bearing <= 5:
                    print(
                        "Derped out on intersection setting bearing to 45 degrees"
                    )
                    bearing = 45

                rightSpeed, leftSpeed, pid = move.move(bearing, rightSpeed,
                                                       leftSpeed, pid)
            # Wait for ESC to end program
            c = cv2.waitKey(7) % 0x100
            if c == 27:
                break
    move.stop()
    cv2.destroyAllWindows()
    return
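The move module itself is not shown here. A rough sketch of a move.move() that would be compatible with the call above, assuming the simple-pid style controller created in main() and a hypothetical set_speeds() motor helper; the base speed and scaling are assumptions:

def move(bearing, rightSpeed, leftSpeed, pid):
    # The bearing error is fed through the PID controller and applied as a
    # differential correction on top of a fixed base speed.
    correction = pid(bearing)          # simple-pid controllers are callable
    base = 0.5                         # assumed base duty cycle
    leftSpeed = min(1.0, max(0.0, base - correction / 90.0))
    rightSpeed = min(1.0, max(0.0, base + correction / 90.0))
    set_speeds(leftSpeed, rightSpeed)  # hypothetical motor-driver helper
    return rightSpeed, leftSpeed, pid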
Example #6
        line_sensor.wait_for_line(seek_time)
        if line_sensor.value < 0.5:
            ret = True
            break
        else:
            direction = not direction
            if direction:
                seek_count += 1
            continue

    return ret

parser.add_option("-s", "--speed", type="float", dest="speed")
(options, args) = parser.parse_args()
if options.speed is not None:
    speed = options.speed

line_sensor.when_line = lambda: move.forward()
line_sensor.when_no_line = lambda: move.stop()

signal.signal(signal.SIGINT, exit_gracefully)

while True:
    move.forward(speed)
    line_sensor.wait_for_no_line()
    if not seek_line():
        print("Can't find any line")
        break

exit_gracefully()
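The snippet above begins mid-file. A minimal sketch of the preamble it appears to rely on, assuming gpiozero's LineSensor; the GPIO pin, the seek timing and the body of exit_gracefully are assumptions:

#!/usr/bin/env python3

import signal
from optparse import OptionParser

from gpiozero import LineSensor

import move  # motor-control module shared by these examples (assumed)

speed = 0.5                   # default, can be overridden with --speed
seek_time = 0.2               # assumption: seconds to sweep per seek attempt
line_sensor = LineSensor(25)  # assumption: line sensor wired to GPIO 25
parser = OptionParser()


def exit_gracefully(signum=None, frame=None):
    move.stop()
    exit()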
Example #7
#!/usr/bin/env python3

import move
import signal
from gpiozero import DistanceSensor

min_distance = 0.15
sonic_sensor = DistanceSensor(
    echo=18, trigger=17, threshold_distance=min_distance)
speed = 0.5


def exit_gracefully(signum=None, frame=None):
    # Default arguments let this be called directly or used as a signal handler
    exit()


sonic_sensor.when_in_range = lambda: move.stop()
sonic_sensor.when_out_of_range = lambda: move.forward()

signal.signal(signal.SIGINT, exit_gracefully)

move.forward(speed)
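# Avoidance loop: back up while an obstacle is within the 15 cm threshold, then turn right once it is clear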
while True:
    sonic_sensor.wait_for_in_range()
    move.backward(speed)
    sonic_sensor.wait_for_out_of_range()
    move.right(speed, 0.75)


exit_gracefully()