def main():
    #broker = redis.StrictRedis()
    #broker = RedisClient().conn
    #sub = broker.pubsub()
    global dcm
    sub = BROKER()
    try:
        dcm = init_dc_motors()
        sub.subscribe(callback, DCM_CH)
        # while True:
        #     #message = sub.get_message()
        #     topic, message = sub.get()
        #     if message != None:
        #         log.debug(message)
        #         data = json.loads(message)
        #         log.debug("DCMC: received data:")
        #         log.debug(data)
        #         action = data['action']
        #         speed = int(data['speed'])
        #         time_limit = 0
        #         if 'time_limit' in data:
        #             time_limit = float(data['time_limit'])
        #         if not (dcm is None):
        #             execute_action(dcm, action, speed, time_limit)
        #         else:
        #             log.error("dcm was not initialized, not performing action!")
        #     sleep(0.2)
    except Exception as ex:
        log.error("exception in DCMController")
        log.error(ex, exc_info=True)
        # guard against a failed init; assumes dcm stays None until init_dc_motors() succeeds
        if dcm is not None:
            dcm.clean_up()
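# The commented-out polling loop above spells out the expected message shape
# ({"action": ..., "speed": ..., "time_limit": ...}). Below is a minimal sketch
# of the subscribed callback, assuming the pika-style (ch, method, properties,
# body) signature used by the stream viewer at the end of this section, and
# that log, dcm and execute_action are the module's existing names; the real
# DCMController defines its own callback, so treat this as illustrative only.
import json

def callback(ch, method, properties, body):
    data = json.loads(body)
    log.debug("DCMC: received data:")
    log.debug(data)
    action = data['action']
    speed = int(data['speed'])
    time_limit = float(data.get('time_limit', 0))
    if dcm is not None:
        execute_action(dcm, action, speed, time_limit)
    else:
        log.error("dcm was not initialized, not performing action!")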
def main():
    #broker = redis.StrictRedis()
    #broker = RedisClient().conn
    #sub = broker.pubsub()
    global sc
    try:
        '''
        UP/DOWN servo => pin 13
        LEFT/RIGHT servo => pin 19
        '''
        sub = BROKER()
        sc = SERVO()
        sub.subscribe(callback, SC_CH)
        # while True:
        #     topic, message = sub.get()
        #     if message != None:
        #         log.debug(message)
        #         data = json.loads(message)
        #         log.debug("SC: received data:")
        #         log.debug(data)
        #         action = data['action']
        #         angle = int(data['angle'])
        #         if not (sc is None):
        #             execute_action(sc, action, angle)
        #         else:
        #             log.error("servos were not initialized, not performing action!")
        #     sleep(0.2)
    except Exception as ex:
        log.error("exception in ServosController")
        log.error(ex, exc_info=True)
        # guard against a failed init; assumes sc stays None until SERVO() succeeds
        if sc is not None:
            sc.clean_up()
def main():
    try:
        #broker = redis.StrictRedis()
        #broker = RedisClient().conn
        #sub = broker.pubsub()
        #tts = TTS()
        sub = BROKER()
        sub.subscribe(callback, TTSC_CH)
        # while True:
        #     topic, message = sub.get_b()
        #     if message != None:
        #         log.debug(message)
        #         data = json.loads(message)
        #         log.debug("TTSC: received data:")
        #         log.debug(data)
        #         action = data['action']
        #         sentence = data['sentence']
        #         #broker.publish(HC_CH, '{"action": "play", "anime": "talk"}')
        #         tts.speak(sentence, broker)
        #         #broker.publish(HC_CH, '{"action": "play", "anime": "standby"}')
        #     sleep(0.4)
    except Exception as ex:
        log.error("error in TTSController")
        log.error(ex, exc_info=True)
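# Judging by the commented handler above, a TTS request is a JSON message with
# "action" and "sentence" fields published on TTSC_CH. A hedged one-liner for
# exercising the controller from another process; the channel value 'TTSC' and
# the "speak" action name are assumptions, since the handler above only reads
# the "sentence" field.
from utils.RabbitCtl import BROKER

broker = BROKER()
broker.publish('TTSC', '{"action": "speak", "sentence": "Hello, I am Ava"}')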
def main():
    try:
        ava = AVA()
        #ava.set_motion(ava.CONFIG.AVA_STANDBY_ANIME)
        execute_action(ava, 'play', 'standby')
        print(ava.CONFIG.AVA_STANDBY_ANIME + "----------")
        #sleep(10)
        #ava.set_motion(ava.CONFIG.AVA_TALK_ANIME)
        #broker = redis.StrictRedis()
        #broker = RedisClient().conn
        #sub = broker.pubsub()
        sub = BROKER()
        sub.subscribe(callback, HC_CH)
        # while True:
        #     ava.process_events()
        #     topic, message = sub.get()
        #     if message != None:
        #         log.debug(message)
        #         data = json.loads(message)
        #         log.debug("HC: received data:")
        #         log.debug(data)
        #         action = data['action']
        #         anime = data['anime']
        #         execute_action(ava, action, anime)
        #     sleep(0.4)
    except Exception as ex:
        log.error("error in HeadController")
        log.error(ex, exc_info=True)
def main():
    try:
        # capture SIGINT signal, e.g., Ctrl+C
        signal.signal(signal.SIGINT, signal_handler)
        #broker = redis.StrictRedis()
        #broker = RedisClient().conn
        broker = BROKER()
        sensitivity = [0.5] * len(models)
        detector = snowboydecoder.HotwordDetector(models, sensitivity=sensitivity)
        callbacks = [
            lambda: execute_action('ava', broker),
            lambda: execute_action('ava_stop', broker),
            lambda: execute_action('ava_go', broker),
            lambda: execute_action('ava_back', broker),
            lambda: execute_action('ava_turn_left', broker),
            lambda: execute_action('ava_turn_right', broker),
            lambda: execute_action('ava_who_are_you', broker),
            lambda: execute_action('ava_dance', broker),
            lambda: execute_action('ava_follow_me', broker)
        ]
        log.debug('Listening... ')
        # main loop
        # make sure the number of callbacks matches the number of models
        detector.start(detected_callback=callbacks,
                       interrupt_check=interrupt_callback,
                       sleep_time=0.1)
        detector.terminate()
    except Exception as ex:
        log.error("exception in AgentController")
        log.error(ex, exc_info=True)
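# execute_action is not shown in this snippet; the pattern implied by the
# callback list is "one hotword, one broker message". A rough sketch with
# placeholder channel names and payloads, the concrete action values belong
# to the respective controllers and are assumptions here:
def execute_action(keyword, broker):
    commands = {
        'ava_stop': ('DCMC', '{"action": "stop", "speed": "0", "time_limit": "0"}'),
        'ava_who_are_you': ('TTSC', '{"action": "speak", "sentence": "I am Ava"}'),
    }
    if keyword in commands:
        channel, payload = commands[keyword]
        broker.publish(channel, payload)
    else:
        log.debug("no handler for keyword: {}".format(keyword))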
def background_job(us, broker):
    # note: the broker argument is not used here; a fresh BROKER() connection
    # is opened for this thread instead
    b = BROKER()
    while True:
        d = us.get_distance()
        #log.debug("background job distance: {}".format(d))
        if d > us._WARN_DIST_:
            pass
        # elif (d <= us._WARN_DIST_ and d > us._STOP_DIST_):
        #     log.debug('US Warning: getting close')
        #     log.debug('US distance: {}'.format(d))
        #     data = '{"action": "slowdown", "distance": "' + str(d) + '"}'
        #     b.publish(MAUC_CH, data)
        elif d <= us._STOP_DIST_:
            log.debug('US Danger: going to crash')
            log.debug('US distance: {}'.format(d))
            data = '{"action": "stopandturn", "distance": "' + str(d) + '"}'
            b.publish(MAUC_CH, data)
            time.sleep(6)
        time.sleep(1)
def main():
    try:
        global left_dist
        global right_dist
        global broker
        broker = BROKER()
        broker.subscribe(callback, MAUC_CH)
        # while True:
        #     topic, message = broker.get()
        #     if message != None:
        #         log.debug(message)
        #         data = json.loads(message)
        #         log.debug("MainAUC: received data:")
        #         log.debug(data)
        #         execute_action(data, broker)
        #     sleep(0.2)
    except Exception as ex:
        log.error("exception in MainAUC")
        log.error(ex, exc_info=True)
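# The ultrasonic background_job earlier in this section publishes
# {"action": "stopandturn", "distance": ...} on MAUC_CH, and the commented
# loop above hands the decoded dict to execute_action. A rough sketch of such
# a handler; the DCMC action names and payloads are placeholders, not taken
# from the real module:
def execute_action(data, broker):
    if data.get('action') == 'stopandturn':
        log.debug("MainAUC: obstacle at {} cm, stopping".format(data.get('distance')))
        broker.publish('DCMC', '{"action": "stop", "speed": "0", "time_limit": "0"}')
        # hypothetical evasive turn; the real command set lives in DCMController
        broker.publish('DCMC', '{"action": "turn_left", "speed": "50", "time_limit": "1"}')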
def main():
    try:
        global r
        global dt
        r = BROKER()
        app.jinja_env.auto_reload = True
        app.config['TEMPLATES_AUTO_RELOAD'] = True
        handler = RotatingFileHandler('AMSpiServer.log', maxBytes=10000, backupCount=1)
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        app.logger.addHandler(handler)
        start_runner()
        dt = Detector(use_tft=True)
        #thread = threading.Thread(target=start_detector)
        #thread.start()
        app.run(debug=False, host='0.0.0.0', port=3000)
    except Exception as ex:
        log.error("Exception in Main flask server")
        log.error(ex, exc_info=True)
        # use a distinct name so the json module is not shadowed
        payload = '{"action": "exit", "speed": "0", "time_limit": "0"}'
        r.publish('DCMC', payload)
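# The server keeps a module-level BROKER handle (r), and the DCMC message
# schema is visible in the shutdown path above. A hedged sketch of a drive
# endpoint built on the same pattern; the route name and parameters are
# assumptions, not part of the original app:
@app.route('/drive/<action>/<int:speed>')
def drive(action, speed):
    payload = '{"action": "' + action + '", "speed": "' + str(speed) + '", "time_limit": "0"}'
    r.publish('DCMC', payload)
    return 'OK'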
def main():
    try:
        global us
        global broker
        us = DSensor()
        #broker = redis.StrictRedis()
        #broker = RedisClient().conn
        #sub = broker.pubsub()
        broker = BROKER()
        #sub = BROKER()
        thread = threading.Thread(target=background_job, args=(us, broker))
        thread.daemon = True
        thread.start()
        broker.subscribe(callback, US_CH)
        # while True:
        #     background_job(us, broker)
        #     time.sleep(2)
        # while True:
        #     topic, message = sub.get()
        #     if message != None:
        #         log.debug(message)
        #         data = json.loads(message)
        #         log.debug("US: received data:")
        #         log.debug(data)
        #         action = data['action']
        #         if not (us is None):
        #             execute_action(us, action, broker)
        #         else:
        #             log.error("sensor was not initialized, not performing action!")
        #     else:
        #         background_job(us, broker)
        #     time.sleep(1)
    except Exception as ex:
        log.error("exception in USController")
        log.error(ex, exc_info=True)
        # guard against a failed init; assumes us stays None until DSensor() succeeds
        if us is not None:
            us.clean_up()
def __init__(self, threshold=0.5, CAMERA_WIDTH=320, CAMERA_HEIGHT=320,
             use_rabbit=False, use_tft=False):
    self.CAMERA_WIDTH = CAMERA_WIDTH
    self.CAMERA_HEIGHT = CAMERA_HEIGHT
    self.USE_TFT = use_tft
    self.threshold = threshold
    self.USE_RABBIT = use_rabbit
    self.labels_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                    "data", "coco_labels.txt")
    self.model_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                   "data", "models",
                                   "ssd_mobilenet_v3_small_coco_2020_01_14",
                                   "model.tflite")
    self.labels = self.load_labels(self.labels_path)
    if self.USE_RABBIT:
        self.broker = BROKER()
    # start() launches the capture thread and returns the stream,
    # so it must not be called a second time
    self.capture_manager = PiVideoStream(resolution=(self.CAMERA_WIDTH, self.CAMERA_HEIGHT),
                                         framerate=30,
                                         USE_RABBIT=self.USE_RABBIT,
                                         cal=False).start()
    # allow the camera to warmup
    time.sleep(0.2)
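# load_labels is referenced above but not shown. A minimal sketch assuming the
# common "index label" per-line layout of coco_labels.txt; if the project's
# file uses a different format, this will need adjusting.
def load_labels(self, path):
    labels = {}
    with open(path, 'r', encoding='utf-8') as f:
        for line in f:
            parts = line.strip().split(maxsplit=1)
            if len(parts) == 2:
                labels[int(parts[0])] = parts[1]
    return labels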
# coord_process = multiprocessing.Process(target=coord, args=(priority_queue, out_queue, ))
# coord_process.daemon = True
# coord_process.start()
# processes.append(coord_process)
#===========================================================================
# Since we spawned all the necessary processes already,
# restore default signal handling for the parent process.
signal.signal(signal.SIGINT, default_handler)
try:
    RGB.get_instance().standby()
    for process in processes:
        process.join()
except KeyboardInterrupt:
    log.error("AYB: caught KeyboardInterrupt, killing processes")
    #broker = redis.StrictRedis()
    #broker = RedisClient().conn
    broker = BROKER()
    broker.publish('DCMC', '{"action": "exit", "speed": "0", "time_limit": "0"}')
    broker.publish('SC', '{"action": "exit", "angle": "-1"}')
    broker.publish('US', '{"action": "exit", "distance": "0"}')
    sleep(1)
    for process in processes:
        process.terminate()
    manager.shutdown()
finally:
    manager.shutdown()
    RGB.get_instance().clean_up()
class PiVideoStream:
    camera = None
    stream = None
    rawCapture = None
    USE_RABBIT = False
    broker = None
    frame_size = (320, 320)

    def __init__(self, frame_size=(320, 320), resolution=(1280, 720),
                 framerate=32, USE_RABBIT=False, cal=False):
        self.frame_size = frame_size
        # initialize the camera and stream
        self.camera = PiCamera()
        self.camera.resolution = resolution
        self.camera.framerate = framerate
        self.camera.awb_mode = 'auto'
        self.camera.exposure_mode = 'auto'
        # As a safety: setting awb_mode to 'off' before these values are set
        # will break the camera driver
        self.camera.awb_gains = (1.0, 1.0)
        #self.camera.rotation = 90
        self.camera.vflip = False
        self.camera.hflip = True
        self.rawCapture = PiRGBArray(self.camera, size=self.frame_size)
        self.stream = self.camera.capture_continuous(self.rawCapture,
                                                     format="bgr",
                                                     resize=self.frame_size,
                                                     use_video_port=True)
        # initialize the frame and the variable used to indicate
        # if the thread should be stopped
        self.frame = None
        self.stopped = False
        # Set the variables used to control the white balance
        self.cali = cal
        self.rGain = 1.0
        self.bGain = 1.0
        self.str = 'EMPTY'
        # USE_RABBIT
        self.USE_RABBIT = USE_RABBIT
        if self.USE_RABBIT == True:
            self.broker = BROKER()

    def custom_awb(self):
        # get r, g, b values from the image
        b, g, r = cv2.split(self.frame)
        b = np.mean(b)
        g = np.mean(g)
        r = np.mean(r)
        # Adjust R and B relative to G, but only if they differ significantly (delta > 4)
        if (abs(r - g) > 4):
            if (r > g):
                if (self.rGain > 0.5):
                    self.rGain -= 0.01
            else:
                if (self.rGain < 7.98):
                    self.rGain += 0.01
        if (abs(b - g) > 4):
            if (b > g):
                if (self.bGain > 0.5):
                    self.bGain -= 0.01
            else:
                if (self.bGain < 7.98):
                    self.bGain += 0.01
        if g < 95:
            if (self.camera.brightness <= 99):
                self.camera.brightness += 1
        elif g > 105:
            if (self.camera.brightness >= 2):
                self.camera.brightness -= 1
        # apply the adjusted gains (was missing "self." on camera)
        self.camera.awb_gains = (self.rGain, self.bGain)
        self.str = 'rGain: %f\tbGain: %f\tBrightness %i R: %i, G: %i, B: %i\n' % (
            self.rGain, self.bGain, self.camera.brightness, r, g, b)
        log.debug(self.str)

    def debug(self):
        # debug info
        return self.str

    def start(self):
        # start the thread to read frames from the video stream
        Thread(target=self.update).start()
        log.debug("Waiting for camera to warm up.")
        time.sleep(2)  ## wait for camera to warm up !!
        log.debug("Done! - Camera is ready to use")
        return self  ## do we need this?
    def update(self):
        # keep looping infinitely until the thread is stopped
        for f in self.stream:
            # grab the frame from the stream and clear the stream in
            # preparation for the next frame
            self.frame = f.array
            # if we init with options act on them
            if (self.USE_RABBIT == True):
                PiVideoStream.pub_image(self)
            # if the thread indicator variable is set, stop the thread
            # and release camera resources
            if self.stopped:
                self.stream.close()
                self.rawCapture.close()
                self.camera.close()
                return
            #time.sleep(1 / (self.camera.framerate - 2))  ## time to sleep dependent on framerate
            self.rawCapture.truncate(0)

    def read(self):
        # return the frame most recently read
        return self.frame

    def stop(self):
        # indicate that the thread should be stopped
        self.stopped = True
        log.debug("PiCam_thread stopped!")

    def set_params(self, data):
        if (data.red_gain >= 1.0 and data.red_gain <= 8.0):
            self.rGain = data.red_gain
        if (data.blue_gain >= 1.0 and data.blue_gain <= 8.0):
            self.bGain = data.blue_gain
        self.camera.brightness = data.brightness
        self.camera.iso = data.iso
        self.camera.awb_mode = data.awb_mode
        self.camera.exposure_mode = data.exposure_mode
        self.camera.awb_gains = (self.rGain, self.bGain)
        self.str = '\n\tawb_mode: %s\n\texposure_mode: %s\n\trGain: %d bGain: %d\n\tiso: %i\n\tBrightness %i' % (
            self.camera.awb_mode, self.camera.exposure_mode,
            self.rGain, self.bGain, self.camera.iso, self.camera.brightness)
        log.debug(self.str)

    def pub_image(self):
        if self.USE_RABBIT == False:
            str0 = "USE_RABBIT not initialised"
            log.debug(str0)
        else:
            # Create a simple stand-in for a ROS-style Image message
            msg = types.SimpleNamespace(
                header=types.SimpleNamespace(frame_id=None, stamp=None),
                data=None)
            #msg = Image()
            msg.header.frame_id = 'base_link'
            now = datetime.now()
            current_time = now.strftime("%H:%M:%S")
            msg.header.stamp = current_time
            # Encode image as JPEG bytes (tostring() is deprecated, use tobytes())
            msg.data = np.array(cv2.imencode('.jpg', self.frame)[1]).tobytes()
            # Publish new image
            self.broker.publish('stream', msg.data)
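# Typical standalone use of the stream: start() launches the update thread and
# returns the instance, read() hands back the latest BGR frame, and stop()
# releases the camera. A minimal sketch (frame handling kept deliberately simple):
if __name__ == '__main__':
    vs = PiVideoStream(frame_size=(320, 320), USE_RABBIT=False).start()
    try:
        while True:
            frame = vs.read()
            if frame is not None:
                cv2.imshow('preview', frame)
            if cv2.waitKey(10) & 0xFF == ord('q'):
                break
    finally:
        vs.stop()
        cv2.destroyAllWindows()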
class Tracker:
    FRAME_W = 320
    FRAME_H = 320
    USE_RABBIT = False
    srv = None
    broker = None
    SC_CH = 'SC'
    CURRENT_PAN = 0
    CURRENT_TILT = 25
    NEUTRAL_PAN = 0
    NEUTRAL_TILT = 25
    PAN_TILT_THRESHOLD = 10
    PAN_TILT_ERROR = 10

    def __init__(self, frame_w=320, frame_h=320, use_rabbit=False):
        self.FRAME_W = frame_w
        self.FRAME_H = frame_h
        self.USE_RABBIT = use_rabbit
        if not self.USE_RABBIT:
            self.srv = SERVO()
        else:
            self.broker = BROKER()

    def track_object(self, xmin, ymin, xmax, ymax, frame):
        xmin = int(xmin * self.FRAME_W)
        xmax = int(xmax * self.FRAME_W)
        ymin = int(ymin * self.FRAME_H)
        ymax = int(ymax * self.FRAME_H)
        obj_x = int(xmin + ((xmax - xmin) / 2))
        obj_y = int(ymin + ((ymax - ymin) / 2))
        cv2.circle(frame, (obj_x, obj_y), 4, (251, 255, 0), -1)
        cv2.line(frame, (0, obj_y), (obj_x, obj_y), (251, 255, 0), 1)
        cv2.line(frame, (obj_x, 0), (obj_x, obj_y), (251, 255, 0), 1)
        cv2.line(frame, (160, 160), (obj_x, obj_y), (251, 255, 0), 1)
        # pixels to move
        pix_move_x = obj_x - (self.FRAME_W / 2)
        pix_move_y = obj_y - (self.FRAME_H / 2)
        # convert pixels to an angle in the range -90 to +90
        changex = round((pix_move_x * 9) / 32)
        changey = round((pix_move_y * 9) / 32)
        log.debug("changex: {} ---------- changey: {}".format(changex, changey))
        # json = '{"action": "moveX", "angle": "-1"}'
        if abs(changex) > self.PAN_TILT_THRESHOLD or abs(changey) > self.PAN_TILT_THRESHOLD:
            self.CURRENT_TILT += changex
            self.CURRENT_PAN += changey
            if self.CURRENT_TILT >= 0:
                self.CURRENT_TILT -= self.PAN_TILT_ERROR
            else:
                self.CURRENT_TILT += self.PAN_TILT_ERROR
            if self.CURRENT_PAN >= 0:
                self.CURRENT_PAN -= self.PAN_TILT_ERROR
            else:
                self.CURRENT_PAN += self.PAN_TILT_ERROR
            if self.CURRENT_TILT > 90:
                self.CURRENT_TILT = 90
            if self.CURRENT_TILT < -90:
                self.CURRENT_TILT = -90
            if self.CURRENT_PAN > 90:
                self.CURRENT_PAN = 90
            if self.CURRENT_PAN < -90:
                self.CURRENT_PAN = -90
        if self.USE_RABBIT and abs(changex) > self.PAN_TILT_THRESHOLD:
            json = '{"action": "moveX", "angle": "' + str(self.CURRENT_TILT) + '"}'
            self.broker.publish(self.SC_CH, json)
            sleep(0.2)
        if self.USE_RABBIT and abs(changey) > self.PAN_TILT_THRESHOLD:
            json = '{"action": "moveY", "angle": "' + str(self.CURRENT_PAN) + '"}'
            self.broker.publish(self.SC_CH, json)
            sleep(0.2)
        if not self.USE_RABBIT and abs(changex) > self.PAN_TILT_THRESHOLD:
            print("not using rabbitmq, tilt: {}".format(self.CURRENT_TILT))
            self.srv.tilt(self.CURRENT_TILT)
            sleep(0.2)
        if not self.USE_RABBIT and abs(changey) > self.PAN_TILT_THRESHOLD:
            print("not using rabbitmq, pan: {}".format(self.CURRENT_PAN))
            self.srv.pan(self.CURRENT_PAN)
            sleep(0.2)
        return frame
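# track_object takes a bounding box in normalised [0, 1] coordinates (the usual
# SSD detector output) plus the BGR frame to annotate, and drives the pan/tilt
# servos either directly or over the 'SC' channel. A hedged usage sketch; the
# detection values are invented, the frame comes from the PiVideoStream shown
# earlier, and both classes are assumed to be importable together:
vs = PiVideoStream(frame_size=(320, 320)).start()
tracker = Tracker(frame_w=320, frame_h=320, use_rabbit=True)
frame = vs.read()
if frame is not None:
    # e.g. a detection roughly centred to the lower right of the frame centre
    frame = tracker.track_object(0.55, 0.40, 0.85, 0.90, frame)
vs.stop()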
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import cv2
import sys

sys.path.append('/home/pi/ava')
from utils.RabbitCtl import BROKER


def callback(ch, method, properties, body):
    # convert the raw byte string into a numpy array
    np_arr = np.frombuffer(body, np.uint8)
    # decode the .jpg-encoded image into cv2 (BGR) format
    image = cv2.imdecode(np_arr, 1)
    cv2.imshow('image', image)
    #print("frame header stamp: {}".format(body.header.stamp))
    # show the image and wait 10 ms
    cv2.waitKey(10)


if __name__ == '__main__':
    br = BROKER()
    br.subscribe(callback, "stream")