def interactiveControl():
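    # Drive the robot from the keyboard while showing a live Pi camera preview;
    # whenever the current command is forward, left, or right, the frame is saved
    # as a training image labelled with that command.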
    setupInteractiveControl()
    clock = pygame.time.Clock()
    with picamera.PiCamera() as camera:
        camera.resolution = (WIDTH, HEIGHT)
        camera.start_preview(fullscreen=False,
                             window=(500, 50, CP_WIDTH, CP_HEIGHT))
        command = 'idle'
        while (True):
            up_key, down, left, right, change, stop = getKeys()
            getDistance()
            if stop:
                break
            if change:
                command = 'idle'
                if up_key:
                    command = 'forward'
                    forward(FB_TIME)
                elif down:
                    command = 'reverse'
                    reverse(FB_TIME)
                elif left:
                    command = 'left'
                    lef(LR_TIME)
                elif right:
                    command = 'right'
                    righ(LR_TIME)
            print(command)
            if (command in ('forward', 'right', 'left')):
                input_img = save_image_with_direction(command)
                camera.capture(input_img, use_video_port=True)
            clock.tick(10)
        pygame.quit()
Example 2
def move(direction):
    # Choose the direction of the request
    if direction == 'camleft':
        video_dir.move_decrease_x()
    elif direction == 'camright':
        video_dir.move_increase_x()
    elif direction == 'camup':
        video_dir.move_increase_y()
    elif direction == 'camdown':
        video_dir.move_decrease_y()
    elif direction == 'camhome':
        video_dir.home_x_y()
    elif direction == 'record':
        subprocess.call(['motion'], shell=True)
    elif direction == 'stoprecord':
        subprocess.call(['./stop.sh'], shell=True)
    elif direction == 'forward':
        motor.forward()
    elif direction == 'reverse':
        motor.backward()
    elif direction == 'left':
        car_dir.turn_left()
    elif direction == 'right':
        car_dir.turn_right()
    elif direction == 'stop':
        motor.ctrl(0)
    elif direction == 'home':
        car_dir.home()
Example 4
def main():
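	# Poll the Wii remote: button 3 drives forward (stopping and rumbling when the
	# front ultrasonic sensor sees an obstacle closer than Collision), button 4
	# reverses, button 7 cleans up and exits, and no button press stops the motors.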
	wm=wimote.connect(Led)
	while True:
		button=wimote.whichbutton(wm)
		time.sleep(0.05)
		wm.rumble=False
	
			
		#Moving Forwards		
		if button==3:
			distance_front = usonic.reading(Trigger_front,Echo_front)
			print distance_front
			if distance_front < Collision:
				wm.rumble=True
				#led.off(Led)
				motor.stop()
			elif distance_front >= Collision:
				#led.on(Led)
				motor.forward()
		#Reverse
		if button==4:
			motor.reverse()
		if button==7:
			motor.cleanup()
			led.cleanup()
			usonic.cleanup()
			sys.exit()
		if button==None:
			led.off(Led)
			motor.stop()
Example 5
def RunTheCar(source, destination):
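    # Plan a route between the two named locations, start a front-sensor thread,
    # replay each path segment (180-degree turns, forward, left, right) on the
    # motors, then signal the sensor thread to finish and wait for it.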
    global isEnd
    global isLocalization
    global LocationArray
    PATH = findPath(LocationArray[source].Name,
                    LocationArray[destination].Name)

    isEnd = 0

    forObstacleTurn(0)

    threads = []

    # Create new threads
    thread1 = myThread(1, "Front Sensor Thread", FrontSensorPins)
    #thread2 = myThread(2, "Left Sensor Thread", LeftSensorPins)

    # Start new Threads
    thread1.start()
    #thread2.start()

    # Add threads to thread list
    threads.append(thread1)
    #threads.append(thread2)

    for i in range(len(PATH)):
        temp = PATH.pop()

        string = "   " + temp.direction

        if temp.direction == "START":
            string = string + "         "
        elif temp.direction == "TURN 180":
            string = string + "        "
            motor.Turn180()
            time.sleep(1)
        elif temp.direction == "FORWARD":
            string = string + "       "
            motor.forward(float(temp.distance) * 0.75)
        elif temp.direction == "RIGHT":
            string = string + "         "
            time.sleep(0.3)
            motor.RightAndForward(float(temp.distance) * 0.75)
            time.sleep(0.3)
        elif temp.direction == "LEFT":
            string = string + "          "
            time.sleep(0.3)
            motor.LeftAndForward(float(temp.distance) * 0.75)
            time.sleep(0.3)

        string = string + str(temp.distance) + "              " + temp.arrived

    forObstacleTurn(1)

    # Telling threads to kill themselves
    isEnd = 1

    # Wait for all threads to complete
    for t in threads:
        t.join()
Example 6
def main():
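    # Blink detection from the raw EEG signal: a spike above +100 followed shortly
    # by a dip below -100 counts as one blink; blinks less than 0.6 s apart form a
    # double or triple blink, which triggers a right or left turn respectively.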
    neuropy = NeuroPy("/dev/rfcomm0") #instantiate NeuroPy class

    neuropy.start() #start thread
    start_time = 0 
    blinked = False #if blink is detected
    last_blink_time = 0
    double_blink = False
    triple_blink = False
    i = 100
    j = 100

    try:
        while i < 20000:
            i = i + 1 
            value = neuropy.rawValue #gets the raw brain activity level
            print value, "   ", i 
            
            if value > 100: #if the raw level gets above 100, which indicates a spike, start the clock
                start_time = time.clock()
                #print value
            if start_time:
                if value <  -100: #if the spike in brain activity is over
                    total_time = time.clock() - start_time #how long the spike was
                    start_time = 0
                    if 0.01 < total_time < 0.050: #if the spike was a certain length
                        if last_blink_time and time.clock() - last_blink_time < .6: #if the blink occured right after the previous blink
                            if double_blink:
                                triple_blink = True
                            else :
                                double_blink = True
                        last_blink_time = time.clock() #reset the clock
                        i+=1
                        blinked = True
            if blinked and time.clock()-last_blink_time > .61: #if a certain amount of time has passed since the last blink
                if triple_blink:
                    print "Triple blink detected .......... turning left"
                    motor.forward_left()
                    time.sleep(3)
                    motor.forward()
                    break
                
                elif double_blink:
                    print "Double blink detected ........... turning right"
                    motor.forward_right()
                    time.sleep(3)
                    motor.forward()
                    break
                    
                double_blink = blinked = triple_blink = False
        time.sleep(2)
        return
    finally:
        neuropy.stop()
Example 7
def camera_pic():
    try:
        # alpha: start the preview semi-transparent (0-255), e.g. alpha = 200
        camera.start_preview(fullscreen=True)
        # Configure the annotation text
        camera.annotate_text_size = 160  #6-160
        camera.annotate_background = Color('black')
        camera.annotate_foreground = Color('white')

        # The camera waits 2 seconds before the countdown starts
        time.sleep(2)

        # Countdown runs down 5.. 4..
        for i in range(5, 2, -1):
            camera.annotate_text = "%s" % i
            time.sleep(.5)
        camera.annotate_text = ""

        # Create the path for the images
        pfad_temp = pfad_pics + '/pics_session'
        os.mkdir(pfad_temp)

        # At 3 the camera starts capturing
        for i in range(num_pic):
            camera.capture(pfad_temp + '/image{0:02d}.jpg'.format(i))
            motor.forward(0.001, 25)
            time.sleep(0.2)

        # Stop the preview
        camera.stop_preview()

        # Reset the motor
        motor.backwards(0.001, 100)
        motor.setStep(0, 0, 0, 0)

        # Call the gif function, passing the temporary pics-session folder
        pfad_gif_return = gif(pfad_temp)

        # Delete the contents of the pics-session folder
        for root, dirs, files in os.walk(pfad_temp, topdown=False):
            for name in files:
                os.remove(os.path.join(root, name))
            for name in dirs:
                os.rmdir(os.path.join(root, name))
        # Delete the pics-session folder
        os.rmdir(pfad_temp)

        return pfad_gif_return

    except KeyboardInterrupt:
        # If the program is interrupted with Ctrl+C, stop the preview
        camera.stop_preview()
Example 8
def command(cmd=None):
    if cmd == STOP:
        motor.stop()
    elif cmd == FORWARD:
        motor.forward()
    elif cmd == BACKWARD:
        motor.backward()
    elif cmd == LEFT:
        motor.left()
    elif cmd == RIGHT:
        motor.right()
    else:
        return "Unknown command", 400, {'Content-Type': 'text/plain'}
    response = "Moving {}".format(cmd.capitalize())
    return response, 200, {'Content-Type': 'text/plain'}
Example 9
def func_return(counter, move_time, current_rotate_position, rotate_time):

    current_rotate_position = angle_fix(current_rotate_position, rotate_time)

    if counter != 0:
        forward(move_time)  # move_time should match the value used when reversing earlier: having backed up several times, move forward all the way back in one go
        pointer = 0
        counter = counter - 1
    else:
        pass
    return counter, current_rotate_position
Example 10
def attention_level(attention_value):
    neuropy.setCallBack("attention",attention_level)
    av = attention_value
    print "\nAttention level = ", attention_value
    if attention_value > 50:	#check if attention is above threshold
        print "Moving forward"
        motor.forward()	#forward motion of wheelchair
        time.sleep(1)
        ultrasonic.ultra()  # check for obstacle distance
        
    else :
        print("Stopped                  Low Attention")
        motor.stop()
    return 
Example 11
def change(changePin):
    changePin = int(changePin)
    if changePin == 1:
        motor.turnLeft()
    elif changePin == 2:
        motor.forward()
    elif changePin == 3:
        motor.turnRight()
    elif changePin == 4:
        motor.backward()
    elif changePin == 5:
        motor.stop()
    else:
        print("Wrong command")

    return render_template("index.html")
Example 12
def command(cmd=None):
    if cmd == STOP:
        led.led_stop()
        motor.stop()
    elif cmd == FORWARD:
        led.led_off()
        motor.forward()
    elif cmd == BACKWARD:
        led.led_off()
        motor.backward()
    elif cmd == LEFT:
        led.led_left()
        motor.left()
    elif cmd == RIGHT:
        led.led_right()
        motor.right()
    return "Success", 200, {'Content-Type': 'text/plain'}
Example 13
async def hello(websocket, path):
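    # Receive movement commands over the websocket and relay each one to the motors.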
    while True:
        msg = await websocket.recv()
        print("< {}".format(msg))

        if msg == 'forward':
            motor.forward()

        if msg == 'stop':
            motor.stop()

        if msg == 'right':
            motor.right()

        if msg == 'left':
            motor.left()

        if msg == 'backward':
            motor.backward()
Example 14
def main():
    recognizer = aiy.cloudspeech.get_recognizer()
    recognizer.expect_phrase('drive')
    recognizer.expect_phrase('left')
    recognizer.expect_phrase('right')

    aiy.audio.get_recorder().start()

    while True:
        print('Listening')
        text = recognizer.recognize()
        if text is None:
            print('waiting')
        else:
            print('Heard "', text, '"')
            if 'drive' in text:
                motor.forward(0.5, 2)
            elif 'left' in text:
                motor.leftturn(0.5, 2)
            elif 'right' in text:
                motor.rightturn(0.5, 2)
Example 15
def choice(input, conn):
    t = 5
    start = t
    inc = 0.5
    try:
        if input == 'f':
            while t > 0:
                if sensor.willFrontCrash():
                    print('stopped after ' + str(start - t))
                    break
                motor.forward(inc)
                t = t - inc
            print('Move forward')
        elif input == 'b':
            while t > 0:
                if sensor.willBackCrash():
                    print('stopped after ' + str(start - t))
                    break
                motor.backward(inc)
                t = t - inc
            print('Move backward')
        elif input == 'r':
            motor.turnRight(t)
            print('turn right')
        elif input == 'l':
            motor.turnLeft(t)
            print('turn left')
        elif input == 's':
            reading = sensor.getAllSensor()
            print('Read all sensors')
            print(reading)
            result = " ".join(reading)
            conn.send(result)
    except KeyboardInterrupt:
        print('key board interrupted!')
    finally:
        print('done')
    conn.send("\n")
Example 16
 def advance(self, img, speed, adjust_speed, cameraPos, centered_bool,
             blob_bool):
     """Drive the robot forward toward the detected object.
     
     Args:
         img: Image.
         speed (int): Speed of robot motors. 
         adjust_speed (int): Small change of speed to turn while moving. 
         cameraPos (int): Position of servos holding camera.
         centered_bool (bool): True if centered. Otherwise, false.
         blob_bool (bool): True if blob detected. False otherwise.
         
     Returns:
         False if object's area is large beyond threshold. Otherwise, true.   
     
     """
     mean_sq, mean_max, max_area, bool_x, bool_y, blob_bool = self.find_blob(
         img)
     if blob_bool:
         motor.forward(speed)
         centered_bool, xShift, yShift = self.check_centered(mean_max)
         if not centered_bool:
             self.orient_to_blob(xShift, yShift, speed, adjust_speed, \
                                             cameraPos)
         if self.dist_count == 0:
             self.init_area = max_area
         else:
             if max_area > 1.8 * self.init_area or max_area > (
                     2 * self.xCenter * 2 * self.yCenter) // 4:
                 motor.brake()
                 self.dist_count = 0
                 return False
         self.dist_count += 1
         if self.dist_count > 100:
             self.dist_count = 1
     else:
         motor.seek(speed)
     return True
Example 17
    def do_POST(self):
        req = urlparse(self.path)

        if re.search('/turn', self.path):
            car_dir.turn(req.body.angle)
        elif re.search('/motors', self.path):
            if req.body.forward == True:
                motor.forward()
            elif req.body.backward == True:
                motor.backward()
            elif req.body.stop == True:
                motor.stop()
        elif re.search('/speed', self.path):
            motor.setSpeed(req.body.speed)

        self.send_response(200)
        self.send_header('Content-type', 'application/json')
        self.end_headers()
Example 18
            centered_face_bool, xShift_face, yShift_face = blob.check_centered(
                point)
            straighten_bool = blob.check_straighten(cameraPos)
            print('centered', centered_face_bool, 'straighten',
                  straighten_bool)
            print('cameraPos', cameraPos)
            if not centered_face_bool or not straighten_bool:  # If the robot is not pointing at the person's face,
                # straighten up.
                cameraPos = blob.straighten_up(xShift_face, yShift_face, 0, adjust_speed * 4,\
                                                cameraPos)
            else:
                # Visual cue that the robot is about to set the face's neutral expression.
                if not neutralBool:
                    motor.backward(speed)
                    time.sleep(.3)
                    motor.forward(speed)
                    time.sleep(.3)
                    motor.brake()
                    time.sleep(.5)
                # Set neutral expression if face is properly aligned.
                neutralBool, neutralFeaturesUpper, neutralFeaturesLower \
                             =face_op.set_neutral(feat, newFeaturesUpper, newFeaturesLower, neutralBool,tol)

        # Increase size of frame for viewing.
        big_frame = cv2.resize(small_frame, (0, 0),
                               fx=scaleUp * 1 / scaleFactor,
                               fy=scaleUp * 1 / scaleFactor)

        # Show text on video.
        for idxJ, dd in enumerate(dict_upper):
            cv2.putText(big_frame, dd, (10, pos_upper[idxJ]), font, font_size,
Example 19
	print 'Waiting for connection...'
	# Waiting for connection. Once receiving a connection, the function accept() returns a separate 
	# client socket for the subsequent communication. By default, the function accept() is a blocking 
	# one, which means it is suspended before the connection comes.
	tcpCliSock, addr = tcpSerSock.accept() 
	print '...connected from :', addr     # Print the IP address of the client connected with the server.

	while True:
		data = ''
		data = tcpCliSock.recv(BUFSIZ)    # Receive data sent from the client. 
		# Analyze the command received and control the car accordingly.
		if not data:
			break
		if data == ctrl_cmd[0]:
			print 'motor moving forward'
			motor.forward()
		elif data == ctrl_cmd[1]:
			print 'recv backward cmd'
			motor.backward()
		elif data == ctrl_cmd[2]:
			print 'recv left cmd'
			car_dir.turn_left()
		elif data == ctrl_cmd[3]:
			print 'recv right cmd'
			car_dir.turn_right()
		elif data == ctrl_cmd[6]:
			print 'recv home cmd'
			car_dir.home()
		elif data == ctrl_cmd[4]:
			print 'recv stop cmd'
			motor.ctrl(0)
Example 20
def motor_forward(request):
	motor.forward()
	return HttpResponse("Motor forward")
Example 21
def server(value, collision):
    # value: car-direction info and collision: collision-detection flag, both read from shared memory
    pre_dir = 'home'
    x = 0
    flag = 0
    while True:
        sys.stdout.flush()
        print 'Waiting for connection...'
        # Waiting for connection. Once receiving a connection, the function accept() returns a separate
        # client socket for the subsequent communication. By default, the function accept() is a blocking
        # one, which means it is suspended before the connection comes.
        tcpCliSock, addr = tcpSerSock.accept()
        print '...connected from :', addr  # Print the IP address of the client connected with the server.

        while True:
            data = ''
            data = tcpCliSock.recv(BUFSIZ)
            # Receive data sent from the client.
            # Analyze the command received and control the car accordingly.
            print data
            if not data:
                break
            if x == 1:
                if flag < 5:
                    flag = flag + 1
                    continue  # after a collision, ignore client data for a few cycles; otherwise the car gets stuck
            x = 0  #after refusing data from client, start receiving
            flag = 0
            if data == ctrl_cmd[0]:  # the client sent "forward"
                if collision.value == 1:  # there is an obstacle in front of the iCar
                    motor.ctrl(0)  # stop
                else:
                    motor.forward()  # run the motors forward
            elif data == ctrl_cmd[1]:  # the client sent "backward"
                motor.backward()

            elif data == ctrl_cmd[2]:  # the client sent "left"
                car_dir.turn_left()  # steer the car to the left

            elif data == ctrl_cmd[3]:  # the client sent "right"
                car_dir.turn_right()  # steer the car to the right

            elif data == ctrl_cmd[6]:  # the client sent "home"
                car_dir.home()  # center the steering

            elif data == ctrl_cmd[4]:  # the client sent "stop"
                motor.ctrl(0)  # stop the motors

            elif data == ctrl_cmd[5]:  # the client pressed the auto button, which sends "auto"
                # autonomous driving
                motor.setSpeed(44)  # set the motors to the tuned speed of 44
                temp = value.value  # car-direction information from jeongwook.py

                if collision.value != 0:  # a collision/obstacle has been detected
                    print 'Collision detected'
                    if collision.value == 1:  # obstacle in front of the iCar
                        print "Obstacle In Front"
                        motor.collision_auto()  # collision_auto backs the iCar up
                        car_dir.turn_right()  # turn right to avoid the obstacle
                        # move forward for about 0.8 s (each forward_auto call moves the iCar for 0.2 s)
                        motor.forward_auto()
                        motor.forward_auto()
                        motor.forward_auto()
                        motor.forward_auto()
                        car_dir.home()
                        motor.forward_auto()
                        motor.forward_auto()
                        pre_dir = 'left'  # with no lane detected the iCar should be on the left side, so go left after avoiding
                    elif collision.value == 2:  #Collision on Left side
                        print "Obstacle is on Left"
                        motor.collision_auto()
                        car_dir.turn_right()  #to avoid collision, turn right
                        motor.forward_auto()
                        motor.forward_auto()
                        motor.forward_auto()
                        motor.forward_auto()
                        car_dir.home()
                        motor.forward_auto()
                        motor.forward_auto()
                        pre_dir = 'left'
                    elif collision.value == 3:  #go left
                        print "Obstacle is on Right"
                        motor.collision_auto()
                        car_dir.turn_left()  #to avoid collision, turn left
                        motor.forward_auto()
                        motor.forward_auto()
                        motor.forward_auto()
                        motor.forward_auto()
                        car_dir.home()
                        motor.forward_auto()
                        motor.forward_auto()
                        pre_dir = 'right'  # with no lane detected the iCar should be on the right side, so go right after avoiding

                    collision.value = 0

                    x = 1  # set x = 1 to stop receiving data from the client for a moment

                elif temp == 1:  # the lane is straight ahead, keep the steering centered
                    print 'Lane is on my way'
                    car_dir.home()
                    motor.forward_auto()  # move the iCar for 0.2 s at speed 44
                    # (short bursts compensate for the camera delay)

                elif temp == 2:  #lane is located on left side
                    print 'Lane is on left side'
                    car_dir.turn_left()
                    motor.forward_auto()
                    pre_dir = 'left'

                elif temp == 3:  #lane is located on right side
                    print 'Lane is on right side'
                    car_dir.turn_right()
                    motor.setSpeed(44)
                    motor.forward_auto()
                    pre_dir = 'right'
                else:
                    if pre_dir == 'right':  #when No detection but predict that lane is on right side
                        print 'cannot find but go right'
                        car_dir.turn_right()
                        motor.setSpeed(44)
                        motor.forward_auto()

                    elif pre_dir == 'left':  #when No detection but predict that lane is on left side
                        print 'cannot find but go left'
                        car_dir.turn_left()
                        motor.forward_auto()

                    else:
                        print 'Cannot find a Lane'  #No detection with no prediction
                        car_dir.home()  #set center direction and stop
                        motor.backward()
                        time.sleep(0.6)
                        motor.ctrl(0)
                        time.sleep(1)

            elif data == ctrl_cmd[7]:  #move camera right
                video_dir.move_increase_x()
            elif data == ctrl_cmd[8]:  #move camera left
                video_dir.move_decrease_x()
            elif data == ctrl_cmd[9]:  #move camera up
                video_dir.move_increase_y()
            elif data == ctrl_cmd[10]:  #move camera down
                video_dir.move_decrease_y()

            elif data[0:5] == 'speed':  # Change the speed
                print data
                numLen = len(data) - len('speed')
                if numLen == 1 or numLen == 2 or numLen == 3:
                    tmp = data[-numLen:]
                    spd = int(tmp)
                    if spd < 24:
                        spd = 24
                    motor.setSpeed(spd)

            elif data[0:7] == 'leftPWM':  # messages ending in PWM calibrate the servo that steers the car (left/home/right positions)
                numLen = len(data) - len('leftPWM')
                pwm = data[-numLen:]
                leftpwm = int(pwm)

                car_dir.change_left(leftpwm)
            elif data[0:7] == 'homePWM':
                numLen = len(data) - len('homePWM')
                pwm = data[-numLen:]
                homepwm = int(pwm)

                car_dir.change_home(homepwm)
            elif data[0:8] == 'rightPWM':
                numLen = len(data) - len('rightPWM')
                pwm = data[-numLen:]
                rightpwm = int(pwm)
                car_dir.change_right(rightpwm)

            else:
                print 'Command Error! Cannot recognize command: ' + data

    tcpSerSock.close()
Example 22
        motor.setSpeed(int(speed_pred * 62 + 40))
        pred = pred[0][0][0]

        i += 1

        data = ctrl_cmd[0]
        if stop_detected:
            data = ctrl_cmd[4]
        elif pred < 0.03 and pred > -0.03:
            data = ctrl_cmd[6]
        else:
            angle = int((pred / 2 + 0.5) * 170 + 35)
            data = "turn=" + str(angle)

        if lstop_detected:
            motor.forward()
            lstop_detected = False

        if data == ctrl_cmd[0]:
            print 'motor moving forward'
            motor.forward()
        elif data == ctrl_cmd[1]:
            print 'recv backward cmd'
            motor.backward()
        elif data == ctrl_cmd[2]:
            print 'recv left cmd'
            car_dir.turn_left()
        elif data == ctrl_cmd[3]:
            print 'recv right cmd'
            car_dir.turn_right()
        elif data == ctrl_cmd[6]:
Example 23
for i in range(EPOCHS):
	game_over = False
	input_img, errors = getImage()
	errors = False
	reward = 0
	while game_over==False:
		if np.random.rand() <= epsilon:
			action = np.random.randint(0, moves, size=1)[0]
		else:
			output = model.predict(input_img)
			action = np.argmax(output[0])
		if int(action) == 0:
			forward(FB_TIME)
			print('forward')
		elif int(action) == 1:
			right(LR_TIME)
			print('right')
		else:
			left(LR_TIME)
Example 24
def motor_op(r,x,y):
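    # Steering table: when a target is flagged (r) and y is large, pick a dog-leg
    # maneuver (turn, short forward, counter-turn, forward) from the horizontal
    # band that x falls into; otherwise just drive forward.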
    post_step = 300
    
    if r and y > 500:
        if 0 < x <= 200:
            print('small cw')
            motor.turn(40)
            motor.forward(200)
            motor.turn(-40)
            motor.forward(post_step)
        
        elif 200 < x <= 400:
            print('cw')
            motor.turn(45)
            motor.forward(350)
            motor.turn(-45)
            motor.forward(post_step)
            
        elif 400 < x <= 600:
            print('ccw')
            motor.turn(45)
            motor.forward(350)
            motor.turn(-45)
            motor.forward(post_step)
        else:    # >600
            print('small ccw')
            motor.turn(-40)
            motor.forward(200)
            motor.turn(40)
            motor.forward(post_step)
            
    elif r and y > 200:
        print("small forward")
        motor.forward(200)
    else:
        print("forward")
        motor.forward(300)
Example 25
    raw_accX = sensor.read_raw_accel_x()
    raw_accY = sensor.read_raw_accel_y()
    raw_accZ = sensor.read_raw_accel_z()
    
    # Accelerometer value Degree Per Second / Scalled Data
    rate_accX = sensor.read_scaled_accel_x()
    rate_accY = sensor.read_scaled_accel_y()
    rate_accZ = sensor.read_scaled_accel_z()
    
    # http://ozzmaker.com/2013/04/18/success-with-a-balancing-robot-using-a-raspberry-pi/
    accAngX = ( math.atan2(rate_accX, rate_accY) + M_PI ) * RAD_TO_DEG
    CFangleX = K * ( CFangleX + rate_gyroX * time_diff) + (1 - K) * accAngX
    
    # http://blog.bitify.co.uk/2013/11/reading-data-from-mpu-6050-on-raspberry.html 
    accAngX1 = get_x_rotation(rate_accX, rate_accY, rate_accZ)
    CFangleX1 = ( K * ( CFangleX1 + rate_gyroX * time_diff) + (1 - K) * accAngX1 )

    # Followed the second example because it gives a reasonable PID reading
    pid = int(p.update(CFangleX1))
    speed = pid * 30

    if(pid > 0):
        MOTOR.forward(speed)
    elif(pid < 0):
        MOTOR.backward( abs(speed) )
    else:
        MOTOR.stop()

    print "{0:.2f} {1:.2f} {2:.2f} {3:.2f} | {4:.2f} {5:.2f} | {6:.2f} | {7:.2f} ".format( gyroAngleX, gyroAngleY , accAngX, CFangleX, accAngX1, CFangleX1, pid, speed)
    #print "{0:.2f} {1:.2f}".format( sensor.read_pitch(), sensor.read_roll())
Example 26
def drive(stops):
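    # Line follower with stop detection: mask red pixels and count circular stop
    # markers with HoughCircles, threshold a cropped grayscale frame to find the
    # line contour's centroid, steer to keep it centered, halt when an obstacle is
    # close, and exit after passing the requested number of stops (or on 'q').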
    # setup all devices and initialize values
    busnum = 1
    car_dir.setup(busnum=busnum)
    motor.setup(busnum=busnum)
    distance_detect.setup()
    car_dir.home()
    motor.setSpeed(30)

    video_capture = cv2.VideoCapture(-1)
    video_capture.set(3, 160)
    video_capture.set(4, 120)
    command = 0
    stopCount = 0
    stopFound = 0

    #drive until passed certain amount of stops
    while True:
        # capture video frames
        ret, frame = video_capture.read()

        # set color masking boundaries for red and mask image
        colorLower = np.array([0, 0, 100], dtype='uint8')
        colorUpper = np.array([50, 50, 255], dtype='uint8')
        colorMask = cv2.inRange(frame, colorLower, colorUpper)
        outMask = cv2.bitwise_and(frame, frame, mask=colorMask)

        # create mask image, convert to grayscale, and blur
        clonedImg = outMask.copy()
        clonedImg = cv2.cvtColor(clonedImg, cv2.COLOR_BGR2GRAY)
        clonedImg = cv2.GaussianBlur(clonedImg, (5, 5), 0)

        # show current image
        cv2.namedWindow('Gray color select', cv2.WINDOW_NORMAL)
        cv2.imshow('Gray color select', clonedImg)

        # calculate circles within image
        circles = cv2.HoughCircles(clonedImg,
                                   cv2.cv.CV_HOUGH_GRADIENT,
                                   1,
                                   20,
                                   param1=50,
                                   param2=30,
                                   minRadius=15,
                                   maxRadius=100)

        # if a circle was detected
        if circles is not None:
            # if this is first time encountering this stop increase stop count
            if stopFound == 0:
                stopCount += 1
                stopFound = 1

            # map out circles for display
            circles = np.uint16(np.around(circles))

            for i in circles[0, :]:
                cv2.circle(clonedImg, (i[0], i[1]), i[2], (0, 255, 0), 2)
                cv2.circle(clonedImg, (i[0], i[1]), 2, (0, 255, 0), 3)
        elif (cv2.countNonZero(clonedImg) == 0):
            stopFound = 0

        # display camera feed and circles
        cv2.namedWindow('Circles', cv2.WINDOW_NORMAL)
        cv2.imshow('Circles', clonedImg)

        # crop the image
        crop_img = frame[60:120, 0:160]

        # convert to grayscale
        gray = cv2.cvtColor(crop_img, cv2.COLOR_BGR2GRAY)

        # gaussian blur
        blur = cv2.GaussianBlur(gray, (5, 5), 0)

        # color thresholding
        ret, thresh = cv2.threshold(blur, 60, 255, cv2.THRESH_BINARY_INV)

        # find the contours of the frame
        contours, hierarchy = cv2.findContours(thresh.copy(), 1,
                                               cv2.CHAIN_APPROX_NONE)

        # detect distance to the closest object
        dis = distance_detect.checkdis()
        print "distance: ", dis

        # find the biggest contour (if detected)
        if len(contours) > 0 and dis >= 15:
            c = max(contours, key=cv2.contourArea)
            M = cv2.moments(c)

            if M['m00'] != 0:
                cx = int(M['m10'] / M['m00'])
                cy = int(M['m01'] / M['m00'])

            cv2.line(crop_img, (cx, 0), (cx, 720), (255, 0, 0), 1)
            cv2.line(crop_img, (0, cy), (1280, cy), (255, 0, 0), 1)
            cv2.drawContours(crop_img, contours, -1, (0, 255, 0), 1)

            if cx >= 120:
                print "Turn Right ", cx
                car_dir.turn_right()
                motor.forward()

            if cx < 120 and cx > 50:
                print "On Track! ", cx
                car_dir.home()
                motor.forward()

            if cx <= 50:
                print "Turn Left! ", cx
                car_dir.turn_left()
                motor.forward()

        else:
            if dis < 15:
                print "something blocking me"
                car_dir.home()
                motor.stop()
            else:
                print "I don't see the line"
                car_dir.home()
                motor.backward()

        # display the resulting frame
        cv2.namedWindow('frame', cv2.WINDOW_NORMAL)
        cv2.imshow('frame', crop_img)

        # exit condition after passing certain amount of stops or 'q' is pressed
        if stopCount == stops or (cv2.waitKey(1) & 0xFF == ord('q')):
            # clean up
            motor.stop()
            distance_detect.cleanup()
            cv2.destroyAllWindows()
            break
Example 27
def forward():
    motor.forward(1)
Example 28
    # http://ozzmaker.com/2013/04/18/success-with-a-balancing-robot-using-a-raspberry-pi/
    accAngX = (math.atan2(rate_accX, rate_accY) + M_PI) * RAD_TO_DEG
    CFangleX = K * (CFangleX + rate_gyroX * time_diff) + (1 - K) * accAngX

    # http://blog.bitify.co.uk/2013/11/reading-data-from-mpu-6050-on-raspberry.html
    accAngX1 = get_x_rotation(rate_accX, rate_accY, rate_accZ)
    CFangleX1 = (K * (CFangleX1 + rate_gyroX * time_diff) + (1 - K) * accAngX1)

    # Followed the second example because it gives a reasonable PID reading
    pid = int(p.update(CFangleX1))  #Add the sampling time in calculating PID
    speed = pid
    #print speed

    if (pid > 0):
        print "forward"
        MOTOR.forward(speed)
    elif (pid < 0):
        print "backward"
        MOTOR.backward(abs(speed))
    else:
        print "stop"
        #MOTOR.forward(speed)
        #MOTOR.backward( abs(speed) )
        MOTOR.stop()  # copy-paste the forward and backward code here instead of stop()

    #print(j)
    #print "{0:.2f} {1:.2f} {2:.2f} {3:.2f} | {4:.2f} {5:.2f} | {6:.2f} | {7:.2f} ".format( gyroAngleX, gyroAngleY , accAngX, CFangleX, accAngX1, CFangleX1, pid, speed)
    #print "{0:.2f} {1:.2f} {2:.2f}".format( gyroAngleX, accAngX, CFangleX)
#print "{0:.2f} {1:.2f}".format( sensor.read_pitch(), sensor.read_roll())
Example 29
def f():
    forward()
    return jsonify({"message":"Started moving forward"})
Example 30
        def __init__(self):
            threading.Thread.__init__(self)
            self.model = load_model(os.path.join('..', '..', 'models', 'sign_classification'))

        def run(self):
            while True:
                if output_full is not None:
                    global stop_detected
                    global lstop_detected
                    stop_detected = self.model.predict(output_full)
                    if stop_detected:
                        sleep(1)
                        lstop_detected = True
                        stop_detected = False
                        sleep(1)

    threadss = SignsThread()
    threadss.start()

    time.sleep(2)

    ultrason = ultrasonic.UltrasonicAsync(sleep_time)
    ultrason.start()
    obstacleExist = False

    motor.forward()

    while True:
        data = ''

        # Check of obstacles
        if ultrason.dist < 20:
            motor.ctrl(0)
            obstacleExist = True

        while obstacleExist:
            time.sleep(sleep_time)
            if ultrason.dist >= 20:  # resume once the path is clear
                obstacleExist = False
                motor.forward()

        # Take input from camera
        output_full2 = np.empty(480 * 640 * 3, dtype=np.uint8)
        cam.capture(output_full2, 'bgr', use_video_port=True)

        output_full = output_full2.reshape((480, 640, 3))
        crop_idx = int(output_full.shape[0] / 2)
        output = output_full[crop_idx:, :]

        img_detection = imresize(output, (120, 160))

        pred = model.predict(img_detection.reshape((1, 120, 160, 3)))
        speed_pred = pred[1][0][0]
        motor.setSpeed(int(speed_pred * 62 + 40))
        pred = pred[0][0][0]

        i += 1

        data = ctrl_cmd[0]
        if stop_detected:
            data = ctrl_cmd[4]
Example 31
	while True:
		if csock is None:
			print("Waiting for connection...")
			csock, addr_info = ssock.accept()
			print("Get connection from", addr_info)
		else:
			total_data = csock.recv(BUFSIZE)
			total_data = total_data.decode()

			forward = total_data[0]
			direction = total_data[1]

			if (forward == "3") :
				speed = 35
				motor.setSpeed(speed)
				motor.forward()

			elif (forward == "2") :
				speed = 25
				motor.setSpeed(speed)
				motor.forward()

			elif (forward =="1"):
				speed = 25
				motor.setSpeed(speed)
				motor.backward()

			elif (forward == "0") :
				speed = 30
				motor.setSpeed(speed)
				motor.backward()
Example 32
def on_message(ws, data):
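    # Dispatch one websocket command string to the car: plain commands map to motor
    # and servo actions, while 'turn=', 'forward=' and 'backward=' carry a value.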
    print data
    # Analyze the command received and control the car accordingly.
    #if not data:

    if data == ctrl_cmd[0]:
        print 'motor moving forward'
        motor.forward()
    elif data == ctrl_cmd[1]:
        print 'recv backward cmd'
        motor.backward()
    elif data == ctrl_cmd[2]:
        print 'recv left cmd'
        car_dir.turn_left()
    elif data == ctrl_cmd[3]:
        print 'recv right cmd'
        car_dir.turn_right()
    elif data == ctrl_cmd[6]:
        print 'recv home cmd'
        car_dir.home()
    elif data == ctrl_cmd[4]:
        print 'recv stop cmd'
        motor.ctrl(0)
    elif data == ctrl_cmd[5]:
        print 'read cpu temp...'
        temp = cpu_temp.read()
        tcpCliSock.send('[%s] %0.2f' % (ctime(), temp))
    elif data == ctrl_cmd[8]:
        print 'recv x+ cmd'
        video_dir.move_increase_x()
    elif data == ctrl_cmd[9]:
        print 'recv x- cmd'
        video_dir.move_decrease_x()
    elif data == ctrl_cmd[10]:
        print 'recv y+ cmd'
        video_dir.move_increase_y()
    elif data == ctrl_cmd[11]:
        print 'recv y- cmd'
        video_dir.move_decrease_y()
    elif data == ctrl_cmd[12]:
        print 'home_x_y'
        video_dir.home_x_y()
    elif data[0:12] == ctrl_cmd[13]:     # Change the speed
        print data
        #numLen = len(data) - len('speed')
        #if numLen == 1 or numLen == 2 or numLen == 3:
        #    tmp = data[-numLen:]
        #    print 'tmp(str) = %s' % tmp
        #    spd = int(tmp)
        #    print 'spd(int) = %d' % spd
        #    if spd < 24:
        #        spd = 24
        motor.setSpeed(30)
    elif data[0:5] == 'turn=':	#Turning Angle
        print 'data =', data
        angle = data.split('=')[1]
        try:
            angle = int(angle)
            car_dir.turn(angle)
        except:
            print 'Error: angle =', angle
    elif data[0:8] == 'forward=':
        print 'data =', data
        spd = data.split('=')[1]
        try:
            spd = int(spd)
            motor.forward(spd)
        except:
            print 'Error speed =', spd
    elif data[0:9] == 'backward=':
        print 'data =', data
        spd = data.split('=')[1]
        try:
            spd = int(spd)
            motor.backward(spd)
        except:
            print 'ERROR, speed =', spd

    else:
        print 'Command Error! Cannot recognize command: ' + data
Example 33
import numpy as np
from camera import getImage
from cnn import checkModel
from motor import forward, left, right
from config import FB_TIME, LR_TIME

model = checkModel()
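# Closed-loop driving: grab a camera frame, let the CNN pick forward, right, or
# left, and run the corresponding motor command for the configured duration.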
while True:
	input_img, errors = getImage()
	output = model.predict(input_img)
	print(output, output.shape )
	action = np.argmax(output[0])
	if int(action) == 0:
		forward(FB_TIME)
		print('forward')
	elif int(action) == 1:
		right(LR_TIME)
		print('right')
	else:
		left(LR_TIME)
		print('left')

Example 34
def run_server(key):
    #!/usr/bin/env python
    import RPi.GPIO as GPIO
    import video_dir
    import car_dir
    import motor
    from time import ctime  # Import necessary modules
    from socket import socket, AF_INET, SOCK_STREAM

    ctrl_cmd = [
        'forward', 'backward', 'left', 'right', 'stop', 'read cpu_temp',
        'home', 'distance', 'x+', 'x-', 'y+', 'y-', 'xy_home'
    ]

    busnum = 1  # Edit busnum to 0 if you use a Raspberry Pi 1 or 0

    HOST = ''  # HOST is an empty string, so bind() will accept connections on all valid addresses.
    PORT = 21567
    BUFSIZ = 1024  # Size of the buffer
    ADDR = (HOST, PORT)

    tcpSerSock = socket(AF_INET, SOCK_STREAM)  # Create a socket.
    tcpSerSock.bind(ADDR)  # Bind the IP address and port number of the server.
    tcpSerSock.listen(5)  # The argument of listen() is the backlog: the number of pending
    # connections that may queue before further ones are refused.

    #video_dir.setup(busnum=busnum)
    #car_dir.setup(busnum=busnum)
    #motor.setup(busnum=busnum)     # Initialize the Raspberry Pi GPIO connected to the DC motor.
    #video_dir.home_x_y()
    #car_dir.home()

    while True:
        print 'Waiting for connection...'
        # Waiting for connection. Once receiving a connection, the function accept() returns a separate
        # client socket for the subsequent communication. By default, the function accept() is a blocking
        # one, which means it is suspended before the connection comes.
        tcpCliSock, addr = tcpSerSock.accept()
        print '...connected from :', addr  # Print the IP address of the client connected with the server.

        while True:
            try:
                data = ''
                data = tcpCliSock.recv(
                    BUFSIZ)  # Receive data sent from the client.
                print "\nEncrypted command recieved from client = ,", eval(
                    data)[2]
                data = crypto.AES_decrypt(eval(data), key)
            except:
                print "INCOMPLETE PACKET ERROR - try to input the command again"
            # Analyze the command received and control the car accordingly.

            if not data:
                break
            if data == ctrl_cmd[0]:
                print 'motor moving forward'
                motor.forward()
            elif data == ctrl_cmd[1]:
                print 'recv backward cmd'
                motor.backward()
            elif data == ctrl_cmd[2]:
                print 'recv left cmd'
                car_dir.turn_left()
            elif data == ctrl_cmd[3]:
                print 'recv right cmd'
                car_dir.turn_right()
            elif data == ctrl_cmd[6]:
                print 'recv home cmd'
                car_dir.home()
            elif data == ctrl_cmd[4]:
                print 'recv stop cmd'
                motor.ctrl(0)
            elif data == ctrl_cmd[5]:
                print 'read cpu temp...'
                temp = cpu_temp.read()
                tcpCliSock.send('[%s] %0.2f' % (ctime(), temp))
            elif data == ctrl_cmd[8]:
                print 'recv x+ cmd'
                video_dir.move_increase_x()
            elif data == ctrl_cmd[9]:
                print 'recv x- cmd'
                video_dir.move_decrease_x()
            elif data == ctrl_cmd[10]:
                print 'recv y+ cmd'
                video_dir.move_increase_y()
            elif data == ctrl_cmd[11]:
                print 'recv y- cmd'
                video_dir.move_decrease_y()
            elif data == ctrl_cmd[12]:
                print 'home_x_y'
                video_dir.home_x_y()
            elif data[0:5] == 'speed':  # Change the speed
                print data
                numLen = len(data) - len('speed')
                if numLen == 1 or numLen == 2 or numLen == 3:
                    tmp = data[-numLen:]
                    print 'tmp(str) = %s' % tmp
                    spd = int(tmp)
                    print 'spd(int) = %d' % spd
                    if spd < 24:
                        spd = 24
                    motor.setSpeed(spd)
            elif data[0:5] == 'turn=':  #Turning Angle
                print 'data =', data
                angle = data.split('=')[1]
                try:
                    angle = int(angle)
                    car_dir.turn(angle)
                except:
                    print 'Error: angle =', angle
            elif data[0:8] == 'forward=':
                print 'data =', data
                spd = data[8:]
                try:
                    spd = int(spd)
                    motor.forward(spd)
                except:
                    print 'Error speed =', spd

            elif data[0:9] == 'backward=':
                print 'data =', data
                spd = data.split('=')[1]
                try:
                    spd = int(spd)
                    motor.backward(spd)
                except:
                    print 'ERROR, speed =', spd

            else:
                #print 'Command Error! Cannot recognize command: ' + data
                print "COMMAND ERROR - Unable to interpret recieved command"
    tcpSerSock.close()
Example 35
 def forward(self, speed=None):
     if speed is None:
         motor.forward()
     else:
         motor.forwardWithSpeed(int(speed))
     return self.ack()
Example 36
joy = xbox.Joystick()

# Setup motors
GPIO.setmode(GPIO.BOARD)
motor.setup()

try:
    #Valid connect may require joystick input to occur
    print "Waiting for Joystick to connect"
    while not joy.connected():
        time.sleep(0.10)

    print('Connected')
    #Show misc inputs until Back button is pressed    
    while not joy.Back() and joy.connected():
        if joy.rightTrigger() > 0:
            motor.forward()
        elif joy.leftTrigger() > 0:
            motor.backward()
        elif joy.rightBumper():
            motor.right()
        elif joy.leftBumper():
            motor.left()
        else:
            motor.stop()

finally:
    #Always close out so that xboxdrv subprocess ends
    joy.close()
    print "Done."
Example 37
	# one, which means it is suspended before the connection comes.
	tcpCliSock, addr = tcpSerSock.accept() 
	print ('...connected from :'+ str(addr))     # Print the IP address of the client connected with the server.

	while True:
		data = ''
		data = tcpCliSock.recv(BUFSIZ)    # Receive data sent from the client. 
		# Analyze the command received and control the car accordingly.
		if not data:
			break

		print('data : ' + str(data))

		if data == ctrl_cmd[0]:
			
			motor.forward()
		elif data == ctrl_cmd[1]:
			
			motor.backward()
		elif data == ctrl_cmd[2]:
			
			car_dir.turn_left()
		elif data == ctrl_cmd[3]:
			
			car_dir.turn_right()
		elif data == ctrl_cmd[6]:
			
			car_dir.home()
		elif data == ctrl_cmd[4]:
			
			motor.ctrl(0)
Example 38
def forward():
	motor.forward(1)