Exemple #1
0
 def __init__(self, config, broker_q, door_detector):
     """Initialize the stream detector with its config, broker queue and door detector."""
     # Latest annotated frame for the video feed; overwritten each detection pass.
     self.outputFrame = SingletonBlockingQueue()
     # Number of clients currently consuming the generated video feed.
     self.active_video_feeds = 0
     self.config = config
     # Queue used to publish detection notifications to the broker.
     self.broker_q = broker_q
     self.door_detector = door_detector
     # Motion detector used to pre-filter frames before object detection.
     self.motion_detector = SimpleMotionDetector(config)
Exemple #2
0
 def __init__(self, rtmp_url, **kwargs):
     """Open an OpenCV capture on the given RTMP URL and set up frame plumbing."""
     log.info("rtmp capture init START")
     # cv2.VideoCapture can block while connecting, hence the START/END log lines.
     self.vcap = cv2.VideoCapture(rtmp_url)
     log.info("rtmp capture init END")
     # Set by stop(); checked by the reader loop to release the capture.
     self.stopped = False
     self.fps = FPS(50, 100)
     # Single-slot queue (by name) — presumably keeps only the latest frame; TODO confirm.
     self.frame_singleton = SingletonBlockingQueue()
Exemple #3
0
    def __init__(self,
                 resolution=(320, 240),
                 framerate=32,
                 format='rgb',
                 **kwargs):
        """Create and configure the Pi camera, then start a continuous capture
        stream into a reusable raw buffer."""
        # initialize the camera
        self.camera = PiCamera()

        # set camera parameters
        self.camera.resolution = resolution
        self.camera.framerate = framerate

        # set optional camera parameters (refer to PiCamera docs);
        # each kwarg is applied directly as a PiCamera attribute
        for (arg, value) in kwargs.items():
            setattr(self.camera, arg, value)

        # initialize the stream
        self.rawCapture = PiRGBArray(self.camera, size=resolution)
        self.stream = self.camera.capture_continuous(self.rawCapture,
                                                     format=format,
                                                     use_video_port=True)

        # initialize the frame and the variable used to indicate
        # if the thread should be stopped
        self.stopped = False

        self.fps = FPS(50, 100)
        # single-slot handoff between the capture thread and the consumer
        self.frame_singleton = SingletonBlockingQueue()
Exemple #4
0
class VideoFileStream:
    """Threaded frame reader that plays back a video file via OpenCV."""

    def __init__(self, file, **kwargs):
        # OpenCV capture handle for the given file path.
        self.vcap = cv2.VideoCapture(file)
        # Flipped by stop() (or end-of-file) to terminate the reader loop.
        self.stopped = False
        self.fps = FPS(50, 100)
        # Single-slot handoff between the reader thread and read().
        self.frame_singleton = SingletonBlockingQueue()

    def start(self):
        """Spawn the daemon reader thread and return self for chaining."""
        reader = Thread(target=self.update, args=())
        reader.daemon = True
        reader.start()
        self.t = reader
        return self

    def update(self):
        """Reader loop: pump frames into the queue until EOF or stop()."""
        while self.vcap.isOpened():
            ret, frame = self.vcap.read()
            if ret:
                # Block until the consumer has taken the previous frame.
                self.frame_singleton.enqueue(frame, wait=True)
                self.fps.count()
            else:
                log.info('Reached the end of the video!')
                self.stopped = True

            if self.stopped:
                self.vcap.release()
                return

    def read(self):
        """Return the most recently captured frame."""
        return self.frame_singleton.dequeue(notify=True)

    def stop(self):
        """Ask the reader loop to terminate and release the capture."""
        self.stopped = True
Exemple #5
0
class StreamingTFObjectDetector(BaseTFObjectDetector):
    """TF object detector that also renders annotated frames for an MJPEG feed."""

    def __init__(self, config, output_q):
        super().__init__(config)
        # Queue used to publish (NotificationTypes.OBJECT_DETECTED, payload) events.
        self.output_q = output_q
        # Latest annotated frame; consumed by generate_output_frames().
        self.output_frame = SingletonBlockingQueue()
        # Number of clients currently streaming the annotated feed.
        self.active_video_feeds = 0

    # NOTE: method name kept as-is ("intermeddiate") for interface compatibility.
    def process_detection_intermeddiate(self, frame, orig_box, image_path):
        """Render the detection box onto the frame, optionally persist the image
        and a Pascal-VOC annotation, and queue the annotated frame for streaming.

        orig_box is (minX, minY, maxX, maxY, label, accuracy).
        """
        # (removed a dead unpacking of orig_box here — the values were never used)
        outputFrame = self.tf_detector.DisplayDetection(frame, orig_box)
        if self.config.tf_od_frame_write:
            cv2.imwrite(image_path, outputFrame)
        if self.config.tf_od_annotation_write:
            # Write a Pascal-VOC XML annotation next to the JPEG.
            imH, imW, _ = frame.shape
            writer = pascal_voc_writer.Writer(image_path, imW, imH)
            minX, minY, maxX, maxY, klass, confidence = orig_box
            writer.addObject(klass, minX, minY, maxX, maxY)
            writer.save(image_path.replace('.jpg', '.xml'))
        if self.config.show_fps:
            cv2.putText(outputFrame, "%.2f fps" % self.fps.fps,
                        (10, outputFrame.shape[0] - 10),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.35, (0, 255, 255), 1)
        self.output_frame.enqueue(outputFrame)

    def process_detection_final(self, label, accuracy, image_path):
        """Publish the final detection result to the output queue."""
        self.output_q.enqueue(
            (NotificationTypes.OBJECT_DETECTED, (label, accuracy, image_path)))

    def generate_output_frames(self):
        """Generator yielding annotated frames as an MJPEG multipart stream."""
        self.active_video_feeds += 1
        try:
            while True:
                outputFrame = self.output_frame.read(timeout=2)
                if outputFrame is not None:
                    # encode the frame in JPEG format
                    (flag, encodedImage) = cv2.imencode(".jpg", outputFrame)
                    # ensure the frame was successfully encoded
                    if not flag:
                        continue
                    # yield the output frame in the byte format
                    yield (b'--frame\r\n'
                           b'Content-Type: image/jpeg\r\n\r\n' +
                           bytearray(encodedImage) + b'\r\n')
        finally:
            # Always decrement the feed count, even if the client disconnects.
            self.active_video_feeds -= 1
Exemple #6
0
class RTMPVideoStream:
    """Threaded frame reader for an RTMP stream via OpenCV."""

    def __init__(self, rtmp_url, **kwargs):
        log.info("rtmp capture init START")
        # cv2.VideoCapture can block while connecting to the stream.
        self.vcap = cv2.VideoCapture(rtmp_url)
        log.info("rtmp capture init END")
        # Set by stop(); checked by the reader loop to release the capture.
        self.stopped = False
        self.fps = FPS(50, 100)
        # Single-slot handoff between the reader thread and read().
        self.frame_singleton = SingletonBlockingQueue()

    def start(self):
        """Spawn the daemon reader thread and return self for chaining."""
        self.t = Thread(target=self.update, args=())
        self.t.daemon = True
        self.t.start()
        return self

    def update(self):
        """Reader loop: capture frames and hand them to frame_singleton.

        BUGFIX: the return flag of vcap.read() is now checked, so failed reads
        no longer enqueue None frames to downstream consumers.
        """
        total = 0
        debug_frame = 100
        while self.vcap.isOpened():
            ret, frame = self.vcap.read()
            if not ret:
                # Transient read failure / stream hiccup: skip this iteration
                # instead of enqueueing a None frame.
                log.warning("rtmp frame read failed")
                if self.stopped:
                    self.vcap.release()
                    return
                continue
            if total % debug_frame == 0:
                log.debug("rtmp capturing..")
            self.frame_singleton.enqueue(frame)
            self.fps.count()
            total += 1

            if self.stopped:
                self.vcap.release()
                return

    def read(self):
        """Return the most recently captured frame."""
        return self.frame_singleton.dequeue()

    def stop(self):
        """Ask the reader loop to terminate and release the capture."""
        self.stopped = True
Exemple #7
0
class PiVideoStream:
    """Threaded frame reader for the Raspberry Pi camera module."""

    def __init__(self,
                 resolution=(320, 240),
                 framerate=32,
                 format='rgb',
                 **kwargs):
        # Create and configure the camera.
        self.camera = PiCamera()
        self.camera.resolution = resolution
        self.camera.framerate = framerate

        # Any extra keyword arguments are applied directly as PiCamera
        # attributes (refer to the PiCamera docs).
        for name, value in kwargs.items():
            setattr(self.camera, name, value)

        # Continuous capture into a reusable raw buffer.
        self.rawCapture = PiRGBArray(self.camera, size=resolution)
        self.stream = self.camera.capture_continuous(self.rawCapture,
                                                     format=format,
                                                     use_video_port=True)

        # Flipped by stop() to terminate the capture loop.
        self.stopped = False

        self.fps = FPS(50, 100)
        # Single-slot handoff between the capture thread and read().
        self.frame_singleton = SingletonBlockingQueue()

    def start(self):
        """Spawn the daemon capture thread, give the sensor time to warm up,
        and return self for chaining."""
        capture_thread = Thread(target=self.update, args=())
        capture_thread.daemon = True
        capture_thread.start()
        self.t = capture_thread
        time.sleep(2.0)
        return self

    def update(self):
        """Capture loop: publish each frame and recycle the raw buffer until
        stop() is requested."""
        for raw_frame in self.stream:
            self.frame_singleton.enqueue(raw_frame.array)
            self.fps.count()
            # Reset the buffer so the next capture can reuse it.
            self.rawCapture.truncate(0)

            if self.stopped:
                # Release camera resources before exiting the thread.
                self.stream.close()
                self.rawCapture.close()
                self.camera.close()
                return

    def read(self):
        """Return the most recently captured frame."""
        return self.frame_singleton.dequeue()

    def stop(self):
        """Ask the capture loop to terminate and release the camera."""
        self.stopped = True
Exemple #8
0
 def __init__(self, file, **kwargs):
     """Open an OpenCV capture on the given video file path."""
     self.vcap = cv2.VideoCapture(file)
     # Flipped by stop() (or end-of-file) to terminate the reader loop.
     self.stopped = False
     self.fps = FPS(50, 100)
     # Single-slot handoff between the reader thread and the consumer.
     self.frame_singleton = SingletonBlockingQueue()
Exemple #9
0
 def __init__(self, config, output_q):
     """Initialize the detector and the queues used for notifications and frames."""
     super().__init__(config)
     # Queue used to publish detection notification events.
     self.output_q = output_q
     # Latest annotated frame for the streaming feed.
     self.output_frame = SingletonBlockingQueue()
     # Number of clients currently consuming the video feed.
     self.active_video_feeds = 0
Exemple #10
0
class StreamDetector():
    """Drives the full pipeline: reads frames from the configured video source,
    runs motion (and optional door) detection, feeds crops to the TF object
    detector, and publishes annotated frames for the MJPEG feed."""

    def __init__(self, config, broker_q, door_detector):
        # Latest annotated frame; consumed by generate().
        self.outputFrame = SingletonBlockingQueue()
        # Number of clients currently consuming the generated video feed.
        self.active_video_feeds = 0
        self.config = config
        # Queue used to publish notifications to the broker.
        self.broker_q = broker_q
        self.door_detector = door_detector
        # Motion detector used to pre-filter frames before object detection.
        self.motion_detector = SimpleMotionDetector(config)

    def start(self):
        """Start the object detector and the configured video source, then spawn
        the frame-processing thread. Returns that thread."""
        log.info("TFObjectDetector init START")
        self.od = StreamingTFObjectDetector(self.config, self.broker_q).start()

        # Import the input backend lazily so unused ones (and their hardware
        # dependencies, e.g. picamera) are never loaded.
        if self.config.input_mode == InputMode.RTMP_STREAM:
            from input.rtmpstream import RTMPVideoStream
            self.vs = RTMPVideoStream(self.config.rtmp_stream_url).start()
        elif self.config.input_mode == InputMode.PI_CAM:
            from input.picamstream import PiVideoStream
            self.vs = PiVideoStream(resolution=(640, 480), framerate=30).start()
        elif self.config.input_mode == InputMode.VIDEO_FILE:
            from input.videofilestream import VideoFileStream
            self.vs = VideoFileStream(self.config.video_file_path).start()

        self.od.wait_for_ready()
        log.info("TFObjectDetector init END")

        # start a thread that will perform object detection
        log.info("detect_objects init..")
        t = threading.Thread(target=self.detect_objects)
        t.daemon = True
        t.start()
        return t

    def cleanup(self):
        # Stop the video source; the daemon threads exit with the process.
        self.vs.stop()

    def draw_masks(self, frame):
        """Draw the configured motion-detection mask rectangle onto the frame."""
        if self.config.md_mask:
            xmin, ymin, xmax, ymax = self.config.md_mask
            cv2.rectangle(frame, (xmin, ymin), (xmax, ymax), (128, 0, 128), 1)

    def detect_objects(self):
        """Main processing loop (runs on its own thread): read frames, run
        motion/door detection, submit crops to the object detector, and publish
        the annotated frame."""
        total = 0

        fps = FPS(50, 100)

        # loop over frames from the video stream
        while True:
            frame = self.vs.read()
            if frame is not None:
                # Annotate a copy so the original frame stays pristine for the
                # object detector.
                output_frame = frame.copy()
                if self.config.tf_apply_md:
                    output_frame, crop, motion_outside = self.motion_detector.detect(output_frame)
                    if self.config.door_movement_detection:
                        door_state = self.door_detector.detect_door_state(frame, self.config.door_detect_open_door_contour)
                        self.door_detector.add_door_state(door_state)
                        self.door_detector.add_motion_state(motion_outside)
                        if self.config.door_detect_show_detection:
                            minX, minY, maxX, maxY = self.config.door_detect_open_door_contour
                            cv2.rectangle(output_frame, (minX, minY), (maxX, maxY), (0, 255, 0), 1)
                            cv2.putText(output_frame, door_state.name, (minX, minY - 7), cv2.FONT_HERSHEY_SIMPLEX, 0.3, (0, 255, 0), 1)
                    if crop is not None:
                        # Only the motion region is submitted for object detection.
                        minX, minY, maxX, maxY = crop
                        cropped_frame = frame[minY:maxY, minX:maxX]
                        self.od.add_task((frame, cropped_frame, (minX, minY)))
                else:
                    # NOTE(review): this branch passes a 4-tuple to add_task while
                    # the branch above passes a 3-tuple — confirm the expected arity.
                    self.od.add_task((frame, frame, (0, 0), None))

                self.draw_masks(output_frame)

                fps.count()

                if total % self.config.fps_print_frames == 0:
                    log.info("od=%.2f/md=%.2f/st=%.2f fps" % (self.od.fps.fps, fps.fps, self.vs.fps.fps))
                log.debug("total: %d" % total)
                total += 1

                if self.config.show_fps:
                    cv2.putText(output_frame,
                                "od=%.2f/md=%.2f/st=%.2f fps" % (self.od.fps.fps, fps.fps, self.vs.fps.fps),
                                (10, output_frame.shape[0] - 10),
                                cv2.FONT_HERSHEY_SIMPLEX, 0.35, (0, 255, 255), 1)
                self.outputFrame.enqueue(output_frame)

            else:
                log.info("frame is NONE")

            # Optional throttle on the processing loop's frame rate.
            if self.config.md_frame_rate > 0:
                time.sleep(1 / self.config.md_frame_rate)
            if self.config.debug_mode:
                # In debug mode, step frame-by-frame; 'q' quits the loop.
                ch = getch()
                if ch == 'q':
                    break

    def generate(self):
        """Generator yielding the annotated frames as an MJPEG multipart stream."""
        self.active_video_feeds += 1
        current_feed_num = self.active_video_feeds
        # loop over frames from the output stream
        try:
            while True:
                # Optional throttle on the outgoing feed's frame rate.
                if self.config.video_feed_fps > 0:
                    time.sleep(1 / self.config.video_feed_fps)
                output_frame = self.outputFrame.read()
                # encode the frame in JPEG format
                (flag, encodedImage) = cv2.imencode(".jpg", output_frame)
                # ensure the frame was successfully encoded
                if not flag:
                    continue
                # yield the output frame in the byte format
                yield (b'--frame\r\n' b'Content-Type: image/jpeg\r\n\r\n' +
                       bytearray(encodedImage) + b'\r\n')
        finally:
            # Always decrement the feed count, even if the client disconnects.
            self.active_video_feeds -= 1
Exemple #11
0
                        mimetype="multipart/x-mixed-replace; boundary=frame")


if __name__ == '__main__':
    # Command-line entry point: parse server/config options, build the
    # detection pipeline from the given config module, and serve it via Flask.
    ap = argparse.ArgumentParser()
    ap.add_argument("-i", "--ip", type=str, required=True,
                    help="ip address of the device")
    ap.add_argument("-o", "--port", type=int, required=True,
                    help="ephemeral port number of the server (1024 to 65535)")
    ap.add_argument("-c", "--config", type=str, required=True,
                    help="path to the python config file")
    args = vars(ap.parse_args())

    # Load the Config class from the module named on the command line.
    m = importlib.import_module(args["config"])
    config = getattr(m, "Config")()
    broker_q = SingletonBlockingQueue()
    notify_q = SingletonBlockingQueue()
    door_detector = None
    if config.door_movement_detection:
        door_detector = DoorMovementDetector(broker_q, config.door_detect_state_history_length)
    sd = StreamDetector(config, broker_q, door_detector)
    mb = Broker(sd.config, door_detector, broker_q, notify_q)

    # Run Flask on a daemon thread so it dies with the main process.
    log.info("flask init..")
    app = Flask(__name__)
    StreamDetectorView.register(app, init_argument=sd, route_base='/')
    f = threading.Thread(target=app.run, kwargs={'host': args["ip"], 'port': args["port"], 'debug': False,
                                                 'threaded': True, 'use_reloader': False})
    f.daemon = True
    f.start()