Example #1
def background_job(us, broker):
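    # Monitor the ultrasonic sensor and publish a stop command when an obstacle
    # gets too close. Assumes module-level `time`, `log`, `BROKER` and `MAUC_CH`
    # from the surrounding project; note that the `broker` argument is accepted
    # but a fresh BROKER() connection is created inside the worker instead.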
    b = BROKER()
    while True:

        d = us.get_distance()
        #log.debug("background job distance: {}".format(d))
        if d > us._WARN_DIST_:
            pass  # path is clear; nothing to do
        # elif (d <= us._WARN_DIST_ and d > us._STOP_DIST_):
        # log.debug('US Warning: getting close')
        # log.debug('US distance: {}'.format(d))
        # data = '{"action": "slowdown", "distance": "' + str(d) + '"}'
        # b.publish(MAUC_CH, data)
        elif d <= us._STOP_DIST_:
            log.debug('US Danger: going to crash')
            log.debug('US distance: {}'.format(d))
            data = '{"action": "stopandturn", "distance": "' + str(d) + '"}'
            b.publish(MAUC_CH, data)
            time.sleep(6)
        time.sleep(1)
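
A minimal sketch of how this worker might be launched, assuming `us` is the project's ultrasonic-sensor wrapper (exposing get_distance(), _WARN_DIST_ and _STOP_DIST_) and `broker` is its message-broker wrapper; both instances are assumptions, not defined in this snippet:

import threading

monitor = threading.Thread(target=background_job, args=(us, broker), daemon=True)
monitor.start()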
Example #2
def main():
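    # Configure file logging, start the background runner and the object
    # detector, then run the Flask app; on failure, publish an exit command
    # on the 'DCMC' channel.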
    try:
        global r
        global dt
        r = BROKER()
        app.jinja_env.auto_reload = True
        app.config['TEMPLATES_AUTO_RELOAD'] = True
        handler = RotatingFileHandler('AMSpiServer.log', maxBytes=10000, backupCount=1)
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        app.logger.addHandler(handler)
        start_runner()
        dt = Detector(use_tft=True)
        #thread = threading.Thread(target=start_detector)
        #thread.start()
        
        app.run(debug=False, host='0.0.0.0', port=3000)
    except Exception as ex:
        log.error("Exception in main Flask server")
        log.error(ex, exc_info=True)
        payload = '{"action": "exit",  "speed": "0", "time_limit": "0"}'
        r.publish('DCMC', payload)
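
A minimal sketch of the usual entry point, assuming this module is executed directly:

if __name__ == '__main__':
    main()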
Example #3
    # coord_process = multiprocessing.Process(target=coord,args=(priority_queue, out_queue, ))
    # coord_process.daemon = True
    # coord_process.start()
    # processes.append(coord_process)
    #===========================================================================

    # Since we spawned all the necessary processes already,
    # restore default signal handling for the parent process.
    signal.signal(signal.SIGINT, default_handler)
    try:
        RGB.get_instance().standby()
        for process in processes:
            process.join()
    except KeyboardInterrupt:
        log.error("AYB: caught KeyboardInterrupt, killing processes")
        #broker = redis.StrictRedis()
        #broker = RedisClient().conn
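        # Ask each worker process to shut itself down over the broker before force-terminating.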
        broker = BROKER()
        broker.publish('DCMC', '{"action": "exit",  "speed": "0", "time_limit": "0"}')
        broker.publish('SC', '{"action": "exit",  "angle": "-1"}')
        broker.publish('US', '{"action": "exit", "distance": "0"}')
        sleep(1)
        for process in processes:
            process.terminate()
        # manager.shutdown() is handled once in the finally block below
    finally:
        manager.shutdown()
        RGB.get_instance().clean_up()
Example #4
class PiVideoStream:
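    # Threaded PiCamera frame grabber: captures BGR frames continuously in a
    # background thread, exposes a simple software white-balance helper
    # (custom_awb) and can publish JPEG-encoded frames on the 'stream' channel.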
    camera = None
    stream = None
    rawCapture = None
    USE_RABBIT = False
    broker = None
    frame_size = (320, 320)

    def __init__(self,
                 frame_size=(320, 320),
                 resolution=(1280, 720),
                 framerate=32,
                 USE_RABBIT=False,
                 cal=False):
        self.frame_size = frame_size
        # initialize the camera and stream
        self.camera = PiCamera()
        self.camera.resolution = resolution
        self.camera.framerate = framerate
        self.camera.awb_mode = 'auto'
        self.camera.exposure_mode = 'auto'

        # Safety: if awb_mode is ever set to 'off' before these gains are set,
        # the camera driver will break, so give them sane defaults up front.
        self.camera.awb_gains = (1.0, 1.0)

        #self.camera.rotation = 90
        self.camera.vflip = False
        self.camera.hflip = True
        self.rawCapture = PiRGBArray(self.camera, size=self.frame_size)
        self.stream = self.camera.capture_continuous(self.rawCapture,
                                                     format="bgr",
                                                     resize=self.frame_size,
                                                     use_video_port=True)

        # initialize the frame and the variable used to indicate
        # if the thread should be stopped
        self.frame = None
        self.stopped = False

        # Set the variables used to control the white balance
        self.cali = cal
        self.rGain = 1.0
        self.bGain = 1.0
        self.str = 'EMPTY'

        # USE_RABBIT
        self.USE_RABBIT = USE_RABBIT
        if self.USE_RABBIT:
            self.broker = BROKER()

    def custom_awb(self):
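        # Software white balance: nudge the red/blue gains by 0.01 per frame
        # toward equal channel means, and step the brightness until the mean
        # green level sits in a mid-grey band (roughly 95-105).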
        # get r,g,b values from the image
        b, g, r = cv2.split(self.frame)
        b = np.mean(b)
        g = np.mean(g)
        r = np.mean(r)

        # Adjust R and B relative to G, but only if they differ from G by
        # more than 4, to avoid oscillating on small differences
        if (abs(r - g) > 4):
            if (r > g):
                if (self.rGain > 0.5):
                    self.rGain -= 0.01
            else:
                if (self.rGain < 7.98):
                    self.rGain += 0.01
        if (abs(b - g) > 4):
            if (b > g):
                if (self.bGain > 0.5):
                    self.bGain -= 0.01
            else:
                if (self.bGain < 7.98):
                    self.bGain += 0.01
        if g < 95:
            if (self.camera.brightness <= 99):
                self.camera.brightness += 1
        elif g > 105:
            if (self.camera.brightness >= 2):
                self.camera.brightness -= 1

        self.camera.awb_gains = (self.rGain, self.bGain)
        self.str = 'rGain: %f\tbGain: %f\tBrightness %i R: %i, G: %i, B: %i\n' % (
            self.rGain, self.bGain, self.camera.brightness, r, g, b)
        log.debug(self.str)

    def debug(self):
        # debug info
        return self.str

    def start(self):
        # start the thread to read frames from the video stream
        Thread(target=self.update).start()
        log.debug("Waiting  for camera to warm up.")
        time.sleep(2)  ## wait for camera to warm up !!
        log.debug("Done! - Camera is read to use")
        return self  ## do we need this?

    def update(self):
        # keep looping infinitely until the thread is stopped
        for f in self.stream:

            # grab the frame from the stream and clear the stream in
            # preparation for the next frame
            self.frame = f.array

            # if we init with options act on them
            if self.USE_RABBIT:
                self.pub_image()
            # if the thread indicator variable is set, stop the thread
            # and release camera resources
            if self.stopped:
                self.stream.close()
                self.rawCapture.close()
                self.camera.close()
                return
            #time.sleep(1/(self.camera.framerate - 2)) ## sleep time dependent on framerate
            self.rawCapture.truncate(0)

    def read(self):
        # return the frame most recently read
        return self.frame

    def stop(self):
        # indicate that the thread should be stopped
        self.stopped = True
        log.debug("PiCam_thread stopped!")

    def set_params(self, data):

        if (data.red_gain >= 1.0 and data.red_gain <= 8.0):
            self.rGain = data.red_gain

        if (data.blue_gain >= 1.0 and data.blue_gain <= 8.0):
            self.bGain = data.blue_gain

        self.camera.brightness = data.brightness
        self.camera.iso = data.iso
        self.camera.awb_mode = data.awb_mode
        self.camera.exposure_mode = data.exposure_mode
        self.camera.awb_gains = (self.rGain, self.bGain)
        self.str = '\n\tawb_mode: %s\n\texposure_mode: %s\n\trGain: %.2f  bGain: %.2f\n\tiso: %i\n\tBrightness %i' % (
            self.camera.awb_mode, self.camera.exposure_mode, self.rGain,
            self.bGain, self.camera.iso, self.camera.brightness)
        log.debug(self.str)

    def pub_image(self):

        if not self.USE_RABBIT:
            str0 = "USE_RABBIT not initialised"
            log.debug(str0)

        else:
            # Create a lightweight stand-in for a ROS-style Image message
            msg = types.SimpleNamespace(
                header=types.SimpleNamespace(frame_id=None, stamp=None),
                data=None)

            #msg = Image()
            msg.header.frame_id = 'base_link'
            now = datetime.now()
            current_time = now.strftime("%H:%M:%S")
            msg.header.stamp = current_time

            # Encode image
            msg.data = np.array(cv2.imencode('.jpg', self.frame)[1]).tobytes()

            # Publish new image
            self.broker.publish('stream', msg.data)
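
A minimal usage sketch, assuming the class above is importable from the surrounding project:

vs = PiVideoStream(frame_size=(320, 320), USE_RABBIT=False).start()
try:
    while True:
        frame = vs.read()
        if frame is None:
            continue
        # ... run detection / tracking on `frame` here ...
finally:
    vs.stop()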
Example #5
class Tracker:
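    # Pan/tilt object tracker: converts a detection's bounding box into servo
    # commands, either directly through a SERVO() instance or by publishing
    # moveX/moveY messages on the 'SC' channel when use_rabbit is set.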
    FRAME_W = 320
    FRAME_H = 320
    USE_RABBIT = False
    srv = None
    broker = None
    SC_CH = 'SC'
    CURRENT_PAN = 0
    CURRENT_TILT = 25
    NEUTRAL_PAN = 0
    NEUTRAL_TILT = 25
    PAN_TILT_THRESHOLD = 10
    PAN_TILT_ERROR = 10

    def __init__(self, frame_w=320, frame_h=320, use_rabbit=False):
        self.FRAME_W = frame_w
        self.FRAME_H = frame_h
        self.USE_RABBIT = use_rabbit
        if not self.USE_RABBIT:
            self.srv = SERVO()
        else:
            self.broker = BROKER()

    def track_object(self, xmin, ymin, xmax, ymax, frame):
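        # xmin..ymax are normalised [0, 1] box coordinates; they are scaled to
        # the frame size, the object centre is drawn onto `frame`, and the
        # pan/tilt servos are steered toward it. The annotated frame is returned.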
        xmin = int(xmin * self.FRAME_W)
        xmax = int(xmax * self.FRAME_W)
        ymin = int(ymin * self.FRAME_H)
        ymax = int(ymax * self.FRAME_H)
        obj_x = int(xmin + ((xmax - xmin) / 2))
        obj_y = int(ymin + ((ymax - ymin) / 2))
        cv2.circle(frame, (obj_x, obj_y), 4, (251, 255, 0), -1)
        cv2.line(frame, (0, obj_y), (obj_x, obj_y), (251, 255, 0), 1)
        cv2.line(frame, (obj_x, 0), (obj_x, obj_y), (251, 255, 0), 1)
        cv2.line(frame, (self.FRAME_W // 2, self.FRAME_H // 2), (obj_x, obj_y), (251, 255, 0), 1)
        # pixels to move
        pix_move_x = obj_x - (self.FRAME_W / 2)
        pix_move_y = obj_y - (self.FRAME_H / 2)
        # convert the pixel offset to an angle change (servo angles range from -90 to +90)
        changex = round((pix_move_x * 9) / 32)
        changey = round((pix_move_y * 9) / 32)
        log.debug("changex: {} ---------- changey: {}".format(
            changex, changey))
        # json = '{"action": "moveX",  "angle": "-1"}'
        if abs(changex) > self.PAN_TILT_THRESHOLD or abs(
                changey) > self.PAN_TILT_THRESHOLD:
            # note: the horizontal error (changex) drives CURRENT_TILT / moveX
            # and the vertical error (changey) drives CURRENT_PAN / moveY below
            self.CURRENT_TILT += changex
            self.CURRENT_PAN += changey
            if self.CURRENT_TILT >= 0:
                self.CURRENT_TILT -= self.PAN_TILT_ERROR
            else:
                self.CURRENT_TILT += self.PAN_TILT_ERROR

            if self.CURRENT_PAN >= 0:
                self.CURRENT_PAN -= self.PAN_TILT_ERROR
            else:
                self.CURRENT_PAN += self.PAN_TILT_ERROR

            if self.CURRENT_TILT > 90:
                self.CURRENT_TILT = 90
            if self.CURRENT_TILT < -90:
                self.CURRENT_TILT = -90
            if self.CURRENT_PAN > 90:
                self.CURRENT_PAN = 90
            if self.CURRENT_PAN < -90:
                self.CURRENT_PAN = -90
            if self.USE_RABBIT and abs(changex) > self.PAN_TILT_THRESHOLD:
                payload = '{"action": "moveX",  "angle": "' + str(
                    self.CURRENT_TILT) + '"}'
                self.broker.publish(self.SC_CH, payload)
                sleep(0.2)
            if self.USE_RABBIT and abs(changey) > self.PAN_TILT_THRESHOLD:
                payload = '{"action": "moveY",  "angle": "' + str(
                    self.CURRENT_PAN) + '"}'
                self.broker.publish(self.SC_CH, payload)
                sleep(0.2)
            if not self.USE_RABBIT and abs(changex) > self.PAN_TILT_THRESHOLD:
                print("not using rabbitmq, tilt: {}".format(self.CURRENT_TILT))
                self.srv.tilt(self.CURRENT_TILT)
                sleep(0.2)
            if not self.USE_RABBIT and abs(changey) > self.PAN_TILT_THRESHOLD:
                print("not using rabbitmq, pan: {}".format(self.CURRENT_PAN))
                self.srv.pan(self.CURRENT_PAN)
                sleep(0.2)

        return frame
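
A minimal sketch of feeding a detection into the tracker, assuming `xmin, ymin, xmax, ymax` are normalised [0, 1] box coordinates from an object detector and `frame` is the matching 320x320 BGR image (both are assumptions, not defined here):

tracker = Tracker(frame_w=320, frame_h=320, use_rabbit=False)
annotated = tracker.track_object(xmin, ymin, xmax, ymax, frame)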