Example #1
def detect_video(obj_det, cam_id, out_file):
    """ """
    print ('CAM = ', cam_id)
    cam = utils.WebCam(cam_id, 'caffe_webcam_demo')
    fourcc  = cv2.VideoWriter_fourcc(*'XVID')
    frame   = cam.get_frame()
    h, w, c = frame.shape
    writer  = cv2.VideoWriter(out_file, fourcc, cam.fps, (w, h))
    start_time = time()
    t_start = datetime.now()
    fps = 0
    frame_idx = 0
    while True:
        if utils.Esc_key_pressed(): break
        frame = cam.get_frame()
        if frame is None: break
        bboxes = obj_det(frame)
        utils.draw_detections(frame, bboxes)
        fps += 1
        frame_idx += 1
        if time() - start_time >= 1:
            print('fps =', fps / (time() - start_time))
            fps = 0
            start_time = time()
        cv2.imshow(cam.name, frame)
        writer.write(frame)
    
    print('Elapsed =', datetime.now() - t_start)
    print('Frame Idx =', frame_idx)
    writer.release()
    cam.close()
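All of these examples poll `utils.Esc_key_pressed()` to decide when to leave the display loop. The helper itself is not included here; a minimal sketch of what it might look like, assuming it is just a thin wrapper around `cv2.waitKey` (which also pumps the GUI events that `cv2.imshow` needs), would be:

import cv2

ESC_KEY = 27  # keycode returned by cv2.waitKey for the Esc key

def Esc_key_pressed(wait_ms=1):
    """Hypothetical helper: refresh the OpenCV windows and report whether Esc was hit."""
    return (cv2.waitKey(wait_ms) & 0xFF) == ESC_KEY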
Example #2
        def twd_cb(frame, bboxes):
            stop_flag = utils.Esc_key_pressed()
            beep_alarm = False
            for box in bboxes:
                box_color = (0, 255, 0) # GREEN
                if self.is_event_detected(box, rule):
                    beep_alarm = True
                    box_color = (0, 0, 255)  # RED
                utils.draw_box(frame, box, box_color)
            # Always show the trip-wire line on each frame
            cv2.line(frame, self.tw_start, self.tw_end, self.tw_color, 3)
            cv2.imshow(self.cam.name, frame)
            self.writer.write(frame)

            # Play a beep sound while the monitored event is detected
            if beep_alarm:
                if not self.beep_on:
                    play_sound(True)
                    self.beep_on = True
            elif self.beep_on:
                play_sound(False)
                self.beep_on = False
            return stop_flag
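Example #2 delegates the alarm to a `play_sound` helper that is not shown. A minimal sketch, assuming it simply wraps the same `winsound` calls that Example #5 below uses inline (Windows only, with a hypothetical `BEEP_SOUND_FILE` path):

import winsound

BEEP_SOUND_FILE = 'beep.wav'  # hypothetical path to the alarm sound

def play_sound(enable):
    """Hypothetical helper: start or stop the looping alarm sound asynchronously."""
    if enable:
        # SND_ASYNC returns immediately, SND_LOOP repeats the sound until it is stopped
        winsound.PlaySound(BEEP_SOUND_FILE,
                           winsound.SND_ASYNC | winsound.SND_LOOP)
    else:
        # Passing None stops whatever is currently playing
        winsound.PlaySound(None, winsound.SND_ASYNC)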
Example #3
def main():
    """ script entry point """
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', '--detector', type=str, default='yolo')
    parser.add_argument('-f', '--framework', type=str, default='opencv-caffe')
    parser.add_argument('-v', '--video', type=str, default='')
    args = vars(parser.parse_args())

    framework  = args['framework']
    detector   = args['detector']
    det        = get_obj_detector(detector, framework)

    if args['video']:
        video = args['video']
    else:
        video = 0

    streamer = LiveDetector(det, video)
    streamer.start()
    while True:
        if utils.Esc_key_pressed(): break
        if streamer.stopped:
            print('STOPPED')
            break
    streamer.stop()
Example #4
def start_live_stream(cam_id):
    """ """
    streamer = LiveStreamer(cam_id)
    streamer.start()
    while True:
        if utils.Esc_key_pressed(): break
        if streamer.stopped: break
    streamer.stop()
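Examples #3 and #4 only show the polling side; the `LiveStreamer`/`LiveDetector` classes are defined elsewhere in the project. A minimal sketch of the contract they appear to follow (a `start()` method, a `stopped` flag and a `stop()` method), assuming a plain `threading.Thread` around `cv2.VideoCapture`:

import threading
import cv2

class LiveStreamer:
    """Hypothetical minimal streamer matching the start()/stopped/stop() contract above."""

    def __init__(self, cam_id, win_name='Live Streaming'):
        self.cap = cv2.VideoCapture(cam_id)
        self.win_name = win_name
        self.stopped = False
        self._thread = threading.Thread(target=self._loop, daemon=True)

    def start(self):
        self._thread.start()

    def _loop(self):
        # NOTE: some platforms require GUI calls on the main thread; kept here for brevity
        while not self.stopped:
            ok, frame = self.cap.read()
            if not ok:                      # camera error or end of stream
                break
            cv2.imshow(self.win_name, frame)
            cv2.waitKey(1)                  # keep the window responsive
        self.stopped = True

    def stop(self):
        self.stopped = True
        self._thread.join()
        self.cap.release()
        cv2.destroyAllWindows()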
Example #5
    def run(self, rule='bidirectional', batch=1):
        """ """
        start_time = time()
        fps = 0
        self.detector = TFNet(self.yolo_args)
        colors = self.detector.meta['colors']
        beep_on = False
        while True:
            # Terminate if Esc is pressed
            if utils.Esc_key_pressed(): break

            # Read Frame and run the Object Detector
            frame = self.cam.get_frame()
            bboxes = self.detector.detect(frame)
            fps += 1
            if time() - start_time >= 1:
                print('fps = ', fps / (time() - start_time))
                fps = 0
                start_time = time()

            beep_alarm = False
            for box in bboxes:
                left, right, top, bot, mess, label, confidence = box
                rect_color = colors[label]
                rect_thick = 1
                if self.is_event_detected(box, rule):
                    beep_alarm = True
                    rect_color = (0, 0, 255)
                    rect_thick = 3
                cv2.rectangle(frame, (left, top), (right, bot), rect_color,
                              rect_thick)
                cv2.putText(frame, mess, (left, top - 18), 0, 1e-3 * self.h,
                            rect_color, 2)

            # Always show the trip-wire line on each frame (once, outside the box loop)
            cv2.line(frame, self.tw_start, self.tw_end, self.tw_color, 3)
            cv2.imshow(self.cam_name, frame)
            self.writer.write(frame)  # assumes the cv2.VideoWriter was created in __init__

            # Play a beep sound while the monitored event is detected
            if beep_alarm:
                if not beep_on:
                    winsound.PlaySound(
                        BEEP_SOUND_FILE,
                        winsound.SND_ASYNC | winsound.SND_LOOP)
                    beep_on = True
            elif beep_on:
                winsound.PlaySound(None, winsound.SND_ASYNC)
                beep_on = False

        self.writer.release()
Example #6
    def call_th(self, frame):
        stop = False
        if utils.Esc_key_pressed():
            stop = True
            self.fps.stop()
            print("Elapsed = {:.2f}".format(self.fps.elapsed()))
            print("FPS     = {:.2f}".format(self.fps.fps()))
            preproc = None
            frame = None
            self.stopped = True
        else:
            preproc = self.preprocess(frame)

        self.inp_q.put((frame, preproc))
        return stop
Example #7
def live_stream(cam_id):
    """ """
    win_name = 'Pi Live Streaming'
    cam, cap = init_pi_cam(win_name)
    t_start = datetime.now()
    fps = FPS().start()
    for pi_frame in cam.capture_continuous(cap, format="bgr", use_video_port=True):
        frame = pi_frame.array
        fps.update()
        cv2.imshow(win_name, frame)
        cap.truncate(0)
        if utils.Esc_key_pressed(): break
    
    fps.stop()
    print("Elapsed = {:.2f}".format(fps.elapsed()))
    print("FPS     = {:.2f}".format(fps.fps()))    
Example #8
    def __call__(self):
        """ """
        def mouse_cb(event, x, y, flags, param):
            self.draw_line(event, (x,y))

        cv2.setMouseCallback(self.winname, mouse_cb)
        while True:
            cv2.imshow(self.winname, self.frame)
            if utils.Esc_key_pressed():
                break
        # Always set the start point to the lower y coordinate
        if self.end_pt[1] < self.start_pt[1]:
            self.start_pt, self.end_pt = self.end_pt, self.start_pt
        return self.start_pt, self.end_pt
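The `draw_line` handler invoked by the mouse callback in Example #8 is not part of the snippet. For reference, a self-contained sketch of the same idea (not the original implementation), where the trip-wire is defined by a click-and-drag and Esc confirms it:

import cv2
import numpy as np

def pick_trip_wire(win_name='Trip-Wire', size=(480, 640)):
    """Hypothetical stand-alone variant of Example #8."""
    canvas = np.zeros((size[0], size[1], 3), np.uint8)
    pts = {'start': None, 'end': None}

    def mouse_cb(event, x, y, flags, param):
        if event == cv2.EVENT_LBUTTONDOWN:
            pts['start'] = (x, y)                 # anchor the line on button press
        elif event == cv2.EVENT_LBUTTONUP and pts['start'] is not None:
            pts['end'] = (x, y)                   # finish the line on release
            cv2.line(canvas, pts['start'], pts['end'], (0, 0, 255), 3)

    cv2.namedWindow(win_name)
    cv2.setMouseCallback(win_name, mouse_cb)
    while True:
        cv2.imshow(win_name, canvas)
        if (cv2.waitKey(1) & 0xFF) == 27:         # Esc ends the selection
            break
    cv2.destroyWindow(win_name)

    # Same convention as Example #8: the start point keeps the lower y coordinate
    if pts['start'] and pts['end'] and pts['end'][1] < pts['start'][1]:
        pts['start'], pts['end'] = pts['end'], pts['start']
    return pts['start'], pts['end']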
Example #9
    def call_noth(self, frame):
        print('Time = ', datetime.now() - self.last_call)
        stop = False
        if utils.Esc_key_pressed():
            stop = True
            self.fps.stop()
            print("Elapsed = {:.2f}".format(self.fps.elapsed()))
            print("FPS     = {:.2f}".format(self.fps.fps()))

        # Start inference on the preprocessed frame
        if self.started:
            self.graph.LoadTensor(self.preproc, 'image')

        # Process output of the previous frame
        if self.net_out is not None:
            out_shape = self.net_out.shape
            if self.model['out_size'] is not None:
                out_size = self.model['out_size']
                self.net_out = self.net_out.reshape(out_size)
                self.net_out = np.transpose(self.net_out, [2, 0, 1])
            self.net_out = self.net_out.astype(np.float32)
            self.bboxes = yolo_get_candidate_objects(self.net_out,
                                                     self.in_frame.shape)
            utils.draw_detections(self.prev_frame, self.bboxes)
            cv2.imshow(self.win_name, self.prev_frame)
            self.fps.update()

        # Set the next input frame
        t1 = datetime.now()
        self.prev_frame = self.in_frame
        self.in_frame = frame
        self.preproc = self.preprocess(self.in_frame)
        print('pre_proc = ', datetime.now() - t1)

        # Run network on the current frame
        self.net_out = None
        if self.started:
            while self.net_out is None:
                self.net_out, obj = self.graph.GetResult()
        else:
            self.started = True
            self.fps = FPS().start()

        self.last_call = datetime.now()
        return stop
Example #10
def live_stream(cam_id):
    """ """
    # cam = utils.WebCam(cam_id, 'Live Streaming')
    cam = utils.IPCam('https://www.youtube.com/watch?v=psfFJR3vZ78',
                      'Live Stream')
    t_start = datetime.now()
    fps = FPS().start()
    while True:
        if utils.Esc_key_pressed(): break
        frame = cam.get_frame()
        fps.update()
        if frame is None: break
        cv2.imshow(cam.name, frame)

    fps.stop()
    print("Elapsed = {:.2f}".format(fps.elapsed()))
    print("FPS     = {:.2f}".format(fps.fps()))
    cam.close()
Example #11
    def __call__(self, frame):
        stop = False
        if utils.Esc_key_pressed():
            stop = True
            self.fps.stop()
            print("Elapsed = {:.2f}".format(self.fps.elapsed()))
            print("FPS     = {:.2f}".format(self.fps.fps()))

            if self.use_threading:
                self.out_q.put(None)
                self.frame_q.put(None)
        else:
            in_frame = self.detector.preprocess(frame)
            net_out = self.detector.detect(in_frame)
            self.fps.update()
            if self.use_threading:
                self.frame_q.put(frame)
                self.out_q.put(net_out)
            else:
                self.bboxes = self.detector.get_bboxes(frame, net_out)
                self.detection_cb(frame, self.bboxes)

        return stop
Example #12
    def __call__(self, frame):
        ret = False
        if utils.Esc_key_pressed():
            ret = True
            self.fps.stop()
            print("Elapsed = {:.2f}".format(self.fps.elapsed()))
            print("FPS     = {:.2f}".format(self.fps.fps()))
            self.out_q.put(None)
            self.frame_q.put(None)
            self.in_q.put(None)
        else:
            if self.use_threading:
                self.frame_q.put(frame)
                frame = self.detector.preprocess(frame)
                self.in_q.put(frame)
            else:
                bboxes = self.detector(frame)
                utils.draw_detections(frame, bboxes)
                cv2.imshow(self.name, frame)
                # self.writer.write(frame)
                self.fps.update()

        return ret
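Examples #11 and #12 only show the producer side of their threaded mode; the consumers of `frame_q`, `in_q` and `out_q` live elsewhere in the project. A sketch of what a consumer for Example #11's layout could look like, assuming `detector.get_bboxes` behaves as shown above and `None` is the shutdown sentinel:

import threading

def detection_consumer(detector, frame_q, out_q, detection_cb):
    """Hypothetical consumer thread pairing raw frames with network output."""
    while True:
        frame = frame_q.get()
        net_out = out_q.get()
        if frame is None or net_out is None:   # None is the shutdown sentinel
            break
        bboxes = detector.get_bboxes(frame, net_out)
        detection_cb(frame, bboxes)            # e.g. draw the boxes and display/record the frame

# Typical wiring (also hypothetical):
# worker = threading.Thread(target=detection_consumer,
#                           args=(detector, frame_q, out_q, detection_cb), daemon=True)
# worker.start()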